This is the name-lookup changes.
Namespace-scope name-lookup now has to contend with merging the bindings
from multiple visible modules, and ensuring said bindings are loaded (we
load them lazily)
nathan
--
Nathan Sidwell
diff --git c/gcc/cp/name-lookup.c w/gcc/cp/name-lookup.c
index 6a88e68c346..fc2323de95c 100644
--- c/gcc/cp/name-lookup.c
+++ w/gcc/cp/name-lookup.c
@@ -35,6 +35,8 @@ along with GCC; see the file COPYING3. If not see
#include "c-family/name-hint.h"
#include "c-family/known-headers.h"
#include "c-family/c-spellcheck.h"
+#include "bitmap.h"
+#include "intl.h"
static cxx_binding *cxx_binding_make (tree value, tree type);
static cp_binding_level *innermost_nonclass_level (void);
@@ -46,16 +48,28 @@ static name_hint maybe_suggest_missing_std_header (location_t location,
static name_hint suggest_alternatives_for_1 (location_t location, tree name,
bool suggest_misspellings);
+/* Slots in MODULE_VEC. */
+#define MODULE_SLOT_CURRENT 0 /* Slot for current TU. */
+#define MODULE_SLOT_GLOBAL 1 /* Slot for merged global module. */
+#define MODULE_SLOT_PARTITION 2 /* Slot for merged partition entities
+ (optional). */
+#define MODULE_SLOTS_FIXED 2 /* Number of always-allocated slots. */
+
/* Create an overload suitable for recording an artificial TYPE_DECL
and another decl. We use this machanism to implement the struct
- stat hack within a namespace. It'd be nice to use it everywhere. */
+ stat hack. */
#define STAT_HACK_P(N) ((N) && TREE_CODE (N) == OVERLOAD && OVL_LOOKUP_P (N))
+#define STAT_TYPE_VISIBLE_P(N) TREE_USED (OVERLOAD_CHECK (N))
#define STAT_TYPE(N) TREE_TYPE (N)
#define STAT_DECL(N) OVL_FUNCTION (N)
+#define STAT_VISIBLE(N) OVL_CHAIN (N)
#define MAYBE_STAT_DECL(N) (STAT_HACK_P (N) ? STAT_DECL (N) : N)
#define MAYBE_STAT_TYPE(N) (STAT_HACK_P (N) ? STAT_TYPE (N) : NULL_TREE)
+/* When a STAT_HACK_P is true, OVL_USING_P and OVL_EXPORT_P are valid
+ and apply to the hacked type. */
+
/* For regular (maybe) overloaded functions, we have OVL_HIDDEN_P.
But we also need to indicate hiddenness on implicit type decls
(injected friend classes), and (coming soon) decls injected from
@@ -116,7 +130,246 @@ find_namespace_value (tree ns, tree name)
return b ? MAYBE_STAT_DECL (*b) : NULL_TREE;
}
-/* Add DECL to the list of things declared in B. */
+/* Look in *SLOT for the binding of NAME in imported module IX.
+ Returns pointer to binding's slot, or NULL if not found. Does a
+ binary search, as this is mainly used for random access during
+ importing. Do not use for the fixed slots. */
+
+static mc_slot *
+search_imported_binding_slot (tree *slot, unsigned ix)
+{
+ gcc_assert (ix);
+
+ if (!*slot)
+ return NULL;
+
+ if (TREE_CODE (*slot) != MODULE_VECTOR)
+ return NULL;
+
+ unsigned clusters = MODULE_VECTOR_NUM_CLUSTERS (*slot);
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (*slot);
+
+ if (MODULE_VECTOR_SLOTS_PER_CLUSTER == MODULE_SLOTS_FIXED)
+ {
+ clusters--;
+ cluster++;
+ }
+
+ while (clusters > 1)
+ {
+ unsigned half = clusters / 2;
+ gcc_checking_assert (cluster[half].indices[0].span);
+ if (cluster[half].indices[0].base > ix)
+ clusters = half;
+ else
+ {
+ clusters -= half;
+ cluster += half;
+ }
+ }
+
+ if (clusters)
+ /* Is it in this cluster? */
+ for (unsigned off = 0; off != MODULE_VECTOR_SLOTS_PER_CLUSTER; off++)
+ {
+ if (!cluster->indices[off].span)
+ break;
+ if (cluster->indices[off].base > ix)
+ break;
+
+ if (cluster->indices[off].base + cluster->indices[off].span > ix)
+ return &cluster->slots[off];
+ }
+
+ return NULL;
+}
+
+static void
+init_global_partition (module_cluster *cluster, tree decl)
+{
+ bool purview = true;
+
+ if (header_module_p ())
+ purview = false;
+ else if (TREE_PUBLIC (decl)
+ && TREE_CODE (decl) == NAMESPACE_DECL
+ && !DECL_NAMESPACE_ALIAS (decl))
+ purview = false;
+ else if (!get_originating_module (decl))
+ purview = false;
+
+ mc_slot *mslot;
+ if (!purview)
+ mslot = &cluster[0].slots[MODULE_SLOT_GLOBAL];
+ else
+ mslot = &cluster[MODULE_SLOT_PARTITION
+ / MODULE_VECTOR_SLOTS_PER_CLUSTER]
+ .slots[MODULE_SLOT_PARTITION
+ % MODULE_VECTOR_SLOTS_PER_CLUSTER];
+
+ if (*mslot)
+ decl = ovl_make (decl, *mslot);
+ *mslot = decl;
+
+ if (TREE_CODE (decl) == CONST_DECL)
+ {
+ tree type = TREE_TYPE (decl);
+ if (TREE_CODE (type) == ENUMERAL_TYPE
+ && IDENTIFIER_ANON_P (DECL_NAME (TYPE_NAME (type)))
+ && decl == TREE_VALUE (TYPE_VALUES (type)))
+ /* Anonymous enums are keyed by their first enumerator, put
+ the TYPE_DECL here too. */
+ *mslot = ovl_make (TYPE_NAME (type), *mslot);
+ }
+}
+
+/* Get the fixed binding slot IX, creating the vector if CREATE is
+ non-zero. If CREATE is < 0, make sure there is at least 1 spare
+ slot for an import. (It is an error for CREATE < 0 and the slot to
+ already exist.) */
+
+static tree *
+get_fixed_binding_slot (tree *slot, tree name, unsigned ix, int create)
+{
+ gcc_checking_assert (ix <= MODULE_SLOT_PARTITION);
+
+ /* An assumption is that the fixed slots all reside in one cluster. */
+ gcc_checking_assert (MODULE_VECTOR_SLOTS_PER_CLUSTER >= MODULE_SLOTS_FIXED);
+
+ if (!*slot || TREE_CODE (*slot) != MODULE_VECTOR)
+ {
+ if (ix == MODULE_SLOT_CURRENT)
+ /* The current TU can just use slot directly. */
+ return slot;
+
+ if (!create)
+ return NULL;
+
+ /* The partition slot is only needed when we know we're a named
+ module. */
+ bool partition_slot = named_module_p ();
+ unsigned want = ((MODULE_SLOTS_FIXED + partition_slot + (create < 0)
+ + MODULE_VECTOR_SLOTS_PER_CLUSTER - 1)
+ / MODULE_VECTOR_SLOTS_PER_CLUSTER);
+ tree new_vec = make_module_vec (name, want);
+ MODULE_VECTOR_NUM_CLUSTERS (new_vec) = want;
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (new_vec);
+
+ /* Initialize the fixed slots. */
+ for (unsigned jx = MODULE_SLOTS_FIXED; jx--;)
+ {
+ cluster[0].indices[jx].base = 0;
+ cluster[0].indices[jx].span = 1;
+ cluster[0].slots[jx] = NULL_TREE;
+ }
+
+ if (partition_slot)
+ {
+ unsigned off = MODULE_SLOT_PARTITION % MODULE_VECTOR_SLOTS_PER_CLUSTER;
+ unsigned ind = MODULE_SLOT_PARTITION / MODULE_VECTOR_SLOTS_PER_CLUSTER;
+ cluster[ind].indices[off].base = 0;
+ cluster[ind].indices[off].span = 1;
+ cluster[ind].slots[off] = NULL_TREE;
+ }
+
+ if (tree orig = *slot)
+ {
+ /* Propagate existing value to current slot. */
+
+ /* Propagate global & module entities to the global and
+ partition slots. */
+ if (tree type = MAYBE_STAT_TYPE (orig))
+ init_global_partition (cluster, type);
+
+ for (ovl_iterator iter (MAYBE_STAT_DECL (orig)); iter; ++iter)
+ {
+ tree decl = *iter;
+
+	      /* Internal linkage entities are deduplicatable.  */
+ init_global_partition (cluster, decl);
+ }
+
+ if (cluster[0].slots[MODULE_SLOT_GLOBAL]
+ && !(TREE_CODE (orig) == NAMESPACE_DECL
+ && !DECL_NAMESPACE_ALIAS (orig)))
+ {
+ /* Note that we had some GMF entries. */
+ if (!STAT_HACK_P (orig))
+ orig = stat_hack (orig);
+
+ MODULE_BINDING_GLOBAL_P (orig) = true;
+ }
+
+ cluster[0].slots[MODULE_SLOT_CURRENT] = orig;
+ }
+
+ *slot = new_vec;
+ }
+ else
+ gcc_checking_assert (create >= 0);
+
+ unsigned off = ix % MODULE_VECTOR_SLOTS_PER_CLUSTER;
+ module_cluster &cluster
+ = MODULE_VECTOR_CLUSTER (*slot, ix / MODULE_VECTOR_SLOTS_PER_CLUSTER);
+
+  /* There must always be slots for these indices.  */
+ gcc_checking_assert (cluster.indices[off].span == 1
+ && !cluster.indices[off].base
+ && !cluster.slots[off].is_lazy ());
+
+ return reinterpret_cast<tree *> (&cluster.slots[off]);
+}
+
+/* *SLOT is a namespace binding slot. Append a slot for imported
+ module IX. */
+
+static mc_slot *
+append_imported_binding_slot (tree *slot, tree name, unsigned ix)
+{
+ gcc_checking_assert (ix);
+
+ if (!*slot || TREE_CODE (*slot) != MODULE_VECTOR)
+ /* Make an initial module vector. */
+ get_fixed_binding_slot (slot, name, MODULE_SLOT_GLOBAL, -1);
+ else if (!MODULE_VECTOR_CLUSTER_LAST (*slot)
+ ->indices[MODULE_VECTOR_SLOTS_PER_CLUSTER - 1].span)
+ /* There is space in the last cluster. */;
+ else if (MODULE_VECTOR_NUM_CLUSTERS (*slot)
+ != MODULE_VECTOR_ALLOC_CLUSTERS (*slot))
+ /* There is space in the vector. */
+ MODULE_VECTOR_NUM_CLUSTERS (*slot)++;
+ else
+ {
+ /* Extend the vector. */
+ unsigned have = MODULE_VECTOR_NUM_CLUSTERS (*slot);
+ unsigned want = (have * 3 + 1) / 2;
+
+ if (want > (unsigned short)~0)
+ want = (unsigned short)~0;
+
+ tree new_vec = make_module_vec (name, want);
+ MODULE_VECTOR_NUM_CLUSTERS (new_vec) = have + 1;
+ memcpy (MODULE_VECTOR_CLUSTER_BASE (new_vec),
+ MODULE_VECTOR_CLUSTER_BASE (*slot),
+ have * sizeof (module_cluster));
+ *slot = new_vec;
+ }
+
+ module_cluster *last = MODULE_VECTOR_CLUSTER_LAST (*slot);
+ for (unsigned off = 0; off != MODULE_VECTOR_SLOTS_PER_CLUSTER; off++)
+ if (!last->indices[off].span)
+ {
+ /* Fill the free slot of the cluster. */
+ last->indices[off].base = ix;
+ last->indices[off].span = 1;
+ last->slots[off] = NULL_TREE;
+ return &last->slots[off];
+ }
+
+ gcc_unreachable ();
+}
+
+/* Add DECL to the list of things declared in binding level B. */
static void
add_decl_to_level (cp_binding_level *b, tree decl)
@@ -171,8 +424,13 @@ public:
public:
tree name; /* The identifier being looked for. */
+
+ /* Usually we just add things to the VALUE binding, but we record
+ (hidden) IMPLICIT_TYPEDEFs on the type binding, which is used for
+ using-decl resolution. */
tree value; /* A (possibly ambiguous) set of things found. */
tree type; /* A type that has been found. */
+
LOOK_want want; /* What kind of entity we want. */
bool deduping; /* Full deduping is needed because using declarations
@@ -238,7 +496,7 @@ private:
void add_value (tree new_val);
void add_type (tree new_type);
bool process_binding (tree val_bind, tree type_bind);
-
+ unsigned process_module_binding (tree val_bind, tree type_bind, unsigned);
/* Look in only namespace. */
bool search_namespace_only (tree scope);
/* Look in namespace and its (recursive) inlines. Ignore using
@@ -262,15 +520,16 @@ private:
private:
void add_fns (tree);
-
void adl_expr (tree);
void adl_type (tree);
void adl_template_arg (tree);
void adl_class (tree);
+ void adl_enum (tree);
void adl_bases (tree);
void adl_class_only (tree);
void adl_namespace (tree);
- void adl_namespace_only (tree);
+ void adl_class_fns (tree);
+ void adl_namespace_fns (tree, bitmap);
public:
/* Search namespace + inlines + maybe usings as qualified lookup. */
@@ -433,8 +692,8 @@ name_lookup::add_overload (tree fns)
if (probe && TREE_CODE (probe) == OVERLOAD
&& OVL_DEDUP_P (probe))
{
- /* We're about to add something found by a using
- declaration, so need to engage deduping mode. */
+ /* We're about to add something found by multiple paths, so
+ need to engage deduping mode. */
lookup_mark (value, true);
deduping = true;
}
@@ -540,36 +799,190 @@ name_lookup::process_binding (tree new_val, tree new_type)
return new_val != NULL_TREE;
}
+/* If we're importing a module containing this binding, add it to the
+ lookup set. The trickiness is with namespaces, we only want to
+ find it once. */
+
+unsigned
+name_lookup::process_module_binding (tree new_val, tree new_type,
+ unsigned marker)
+{
+ /* Optimize for (re-)finding a public namespace. We only need to
+ look once. */
+ if (new_val && !new_type
+ && TREE_CODE (new_val) == NAMESPACE_DECL
+ && TREE_PUBLIC (new_val)
+ && !DECL_NAMESPACE_ALIAS (new_val))
+ {
+ if (marker & 2)
+ return marker;
+ marker |= 2;
+ }
+
+ if (new_type || new_val)
+ marker |= process_binding (new_val, new_type);
+
+ return marker;
+}
+
/* Look in exactly namespace SCOPE. */
bool
name_lookup::search_namespace_only (tree scope)
{
bool found = false;
-
if (tree *binding = find_namespace_slot (scope, name))
{
- tree value = *binding, type = NULL_TREE;
-
- if (STAT_HACK_P (value))
+ tree val = *binding;
+ if (TREE_CODE (val) == MODULE_VECTOR)
{
- type = STAT_TYPE (value);
- value = STAT_DECL (value);
-
- if (!bool (want & LOOK_want::HIDDEN_FRIEND))
+ /* I presume the binding list is going to be sparser than
+ the import bitmap. Hence iterate over the former
+ checking for bits set in the bitmap. */
+ bitmap imports = get_import_bitmap ();
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (val);
+ int marker = 0;
+ int dup_detect = 0;
+
+ if (tree bind = cluster->slots[MODULE_SLOT_CURRENT])
{
- if (STAT_TYPE_HIDDEN_P (*binding))
- type = NULL_TREE;
- if (STAT_DECL_HIDDEN_P (*binding))
- value = NULL_TREE;
- else
+ if (!deduping)
+ {
+ if (named_module_purview_p ())
+ {
+ dup_detect |= 2;
+
+ if (STAT_HACK_P (bind) && MODULE_BINDING_GLOBAL_P (bind))
+ dup_detect |= 1;
+ }
+ else
+ dup_detect |= 1;
+ }
+ tree type = NULL_TREE;
+ tree value = bind;
+
+ if (STAT_HACK_P (bind))
+ {
+ type = STAT_TYPE (bind);
+ value = STAT_DECL (bind);
+
+ if (!bool (want & LOOK_want::HIDDEN_FRIEND))
+ {
+ if (STAT_TYPE_HIDDEN_P (bind))
+ type = NULL_TREE;
+ if (STAT_DECL_HIDDEN_P (bind))
+ value = NULL_TREE;
+ else
+ value = ovl_skip_hidden (value);
+ }
+ }
+ else if (!bool (want & LOOK_want::HIDDEN_FRIEND))
value = ovl_skip_hidden (value);
+
+ marker = process_module_binding (value, type, marker);
}
+
+ /* Scan the imported bindings. */
+ unsigned ix = MODULE_VECTOR_NUM_CLUSTERS (val);
+ if (MODULE_VECTOR_SLOTS_PER_CLUSTER == MODULE_SLOTS_FIXED)
+ {
+ ix--;
+ cluster++;
+ }
+
+ /* Do this in forward order, so we load modules in an order
+ the user expects. */
+ for (; ix--; cluster++)
+ for (unsigned jx = 0; jx != MODULE_VECTOR_SLOTS_PER_CLUSTER; jx++)
+ {
+ /* Are we importing this module? */
+ if (unsigned base = cluster->indices[jx].base)
+ if (unsigned span = cluster->indices[jx].span)
+ do
+ if (bitmap_bit_p (imports, base))
+ goto found;
+ while (++base, --span);
+ continue;
+
+ found:;
+ /* Is it loaded? */
+ if (cluster->slots[jx].is_lazy ())
+ {
+ gcc_assert (cluster->indices[jx].span == 1);
+ lazy_load_binding (cluster->indices[jx].base,
+ scope, name, &cluster->slots[jx]);
+ }
+ tree bind = cluster->slots[jx];
+ if (!bind)
+ /* Load errors could mean there's nothing here. */
+ continue;
+
+ /* Extract what we can see from here. If there's no
+ stat_hack, then everything was exported. */
+ tree type = NULL_TREE;
+
+
+ /* If STAT_HACK_P is false, everything is visible, and
+ there's no duplication possibilities. */
+ if (STAT_HACK_P (bind))
+ {
+ if (!deduping)
+ {
+ /* Do we need to engage deduplication? */
+ int dup = 0;
+ if (MODULE_BINDING_GLOBAL_P (bind))
+ dup = 1;
+ else if (MODULE_BINDING_PARTITION_P (bind))
+ dup = 2;
+ if (unsigned hit = dup_detect & dup)
+ {
+ if ((hit & 1 && MODULE_VECTOR_GLOBAL_DUPS_P (val))
+ || (hit & 2
+ && MODULE_VECTOR_PARTITION_DUPS_P (val)))
+ {
+ lookup_mark (value, true);
+ deduping = true;
+ }
+ }
+ dup_detect |= dup;
+ }
+
+ if (STAT_TYPE_VISIBLE_P (bind))
+ type = STAT_TYPE (bind);
+ bind = STAT_VISIBLE (bind);
+ }
+
+ /* And process it. */
+ marker = process_module_binding (bind, type, marker);
+ }
+ found |= marker & 1;
}
- else if (!bool (want & LOOK_want::HIDDEN_FRIEND))
- value = ovl_skip_hidden (value);
+ else
+ {
+ /* Only a current module binding, visible from the current module. */
+ tree bind = *binding;
+ tree value = bind, type = NULL_TREE;
- found |= process_binding (value, type);
+ if (STAT_HACK_P (bind))
+ {
+ type = STAT_TYPE (bind);
+ value = STAT_DECL (bind);
+
+ if (!bool (want & LOOK_want::HIDDEN_FRIEND))
+ {
+ if (STAT_TYPE_HIDDEN_P (bind))
+ type = NULL_TREE;
+ if (STAT_DECL_HIDDEN_P (bind))
+ value = NULL_TREE;
+ else
+ value = ovl_skip_hidden (value);
+ }
+ }
+ else if (!bool (want & LOOK_want::HIDDEN_FRIEND))
+ value = ovl_skip_hidden (value);
+
+ found |= process_binding (value, type);
+ }
}
return found;
@@ -777,20 +1190,147 @@ name_lookup::add_fns (tree fns)
add_overload (fns);
}
-/* Add functions of a namespace to the lookup structure. */
+/* Add the overloaded fns of SCOPE. */
void
-name_lookup::adl_namespace_only (tree scope)
+name_lookup::adl_namespace_fns (tree scope, bitmap imports)
{
- mark_seen (scope);
+ if (tree *binding = find_namespace_slot (scope, name))
+ {
+ tree val = *binding;
+ if (TREE_CODE (val) != MODULE_VECTOR)
+ add_fns (ovl_skip_hidden (MAYBE_STAT_DECL (val)));
+ else
+ {
+ /* I presume the binding list is going to be sparser than
+ the import bitmap. Hence iterate over the former
+ checking for bits set in the bitmap. */
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (val);
+ int dup_detect = 0;
- /* Look down into inline namespaces. */
- if (vec<tree, va_gc> *inlinees = DECL_NAMESPACE_INLINEES (scope))
- for (unsigned ix = inlinees->length (); ix--;)
- adl_namespace_only ((*inlinees)[ix]);
+ if (tree bind = cluster->slots[MODULE_SLOT_CURRENT])
+ {
+ /* The current TU's bindings must be visible, we don't
+ need to check the bitmaps. */
+
+ if (!deduping)
+ {
+ if (named_module_purview_p ())
+ {
+ dup_detect |= 2;
+
+ if (STAT_HACK_P (bind) && MODULE_BINDING_GLOBAL_P (bind))
+ dup_detect |= 1;
+ }
+ else
+ dup_detect |= 1;
+ }
+
+ add_fns (ovl_skip_hidden (MAYBE_STAT_DECL (bind)));
+ }
+
+ /* Scan the imported bindings. */
+ unsigned ix = MODULE_VECTOR_NUM_CLUSTERS (val);
+ if (MODULE_VECTOR_SLOTS_PER_CLUSTER == MODULE_SLOTS_FIXED)
+ {
+ ix--;
+ cluster++;
+ }
+
+ /* Do this in forward order, so we load modules in an order
+ the user expects. */
+ for (; ix--; cluster++)
+ for (unsigned jx = 0; jx != MODULE_VECTOR_SLOTS_PER_CLUSTER; jx++)
+ {
+ /* Functions are never on merged slots. */
+ if (!cluster->indices[jx].base
+ || cluster->indices[jx].span != 1)
+ continue;
+
+ /* Is this slot visible? */
+ if (!bitmap_bit_p (imports, cluster->indices[jx].base))
+ continue;
+
+	      /* Is it loaded?  */
+ if (cluster->slots[jx].is_lazy ())
+ lazy_load_binding (cluster->indices[jx].base,
+ scope, name, &cluster->slots[jx]);
+
+ tree bind = cluster->slots[jx];
+ if (!bind)
+ /* Load errors could mean there's nothing here. */
+ continue;
+
+ if (STAT_HACK_P (bind))
+ {
+ if (!deduping)
+ {
+ /* Do we need to engage deduplication? */
+ int dup = 0;
+ if (MODULE_BINDING_GLOBAL_P (bind))
+ dup = 1;
+ else if (MODULE_BINDING_PARTITION_P (bind))
+ dup = 2;
+ if (unsigned hit = dup_detect & dup)
+ {
+ if ((hit & 1 && MODULE_VECTOR_GLOBAL_DUPS_P (val))
+ || (hit & 2
+ && MODULE_VECTOR_PARTITION_DUPS_P (val)))
+ {
+ lookup_mark (value, true);
+ deduping = true;
+ }
+ }
+ dup_detect |= dup;
+ }
+
+ bind = STAT_VISIBLE (bind);
+ }
+
+ add_fns (bind);
+ }
+ }
+ }
+}
+
+/* Add the hidden friends of class TYPE.  */
+
+void
+name_lookup::adl_class_fns (tree type)
+{
+ /* Add friends. */
+ for (tree list = DECL_FRIENDLIST (TYPE_MAIN_DECL (type));
+ list; list = TREE_CHAIN (list))
+ if (name == FRIEND_NAME (list))
+ {
+ tree context = NULL_TREE; /* Lazily computed. */
+ for (tree friends = FRIEND_DECLS (list); friends;
+ friends = TREE_CHAIN (friends))
+ {
+ tree fn = TREE_VALUE (friends);
+
+ /* Only interested in global functions with potentially hidden
+ (i.e. unqualified) declarations. */
+ if (!context)
+ context = decl_namespace_context (type);
+ if (CP_DECL_CONTEXT (fn) != context)
+ continue;
+
+ if (!deduping)
+ {
+ lookup_mark (value, true);
+ deduping = true;
+ }
+
+ /* Template specializations are never found by name lookup.
+ (Templates themselves can be found, but not template
+ specializations.) */
+ if (TREE_CODE (fn) == FUNCTION_DECL && DECL_USE_TEMPLATE (fn))
+ continue;
- if (tree fns = find_namespace_value (scope, name))
- add_fns (ovl_skip_hidden (fns));
+ add_fns (fn);
+ }
+ }
}
/* Find the containing non-inlined namespace, add it and all its
@@ -799,14 +1339,17 @@ name_lookup::adl_namespace_only (tree scope)
void
name_lookup::adl_namespace (tree scope)
{
- if (seen_p (scope))
+ if (see_and_mark (scope))
return;
- /* Find the containing non-inline namespace. */
- while (DECL_NAMESPACE_INLINE_P (scope))
- scope = CP_DECL_CONTEXT (scope);
+ /* Look down into inline namespaces. */
+ if (vec<tree, va_gc> *inlinees = DECL_NAMESPACE_INLINEES (scope))
+ for (unsigned ix = inlinees->length (); ix--;)
+ adl_namespace ((*inlinees)[ix]);
- adl_namespace_only (scope);
+ if (DECL_NAMESPACE_INLINE_P (scope))
+ /* Mark parent. */
+ adl_namespace (CP_DECL_CONTEXT (scope));
}
/* Adds the class and its friends to the lookup structure. */
@@ -826,31 +1369,6 @@ name_lookup::adl_class_only (tree type)
tree context = decl_namespace_context (type);
adl_namespace (context);
-
- complete_type (type);
-
- /* Add friends. */
- for (tree list = DECL_FRIENDLIST (TYPE_MAIN_DECL (type)); list;
- list = TREE_CHAIN (list))
- if (name == FRIEND_NAME (list))
- for (tree friends = FRIEND_DECLS (list); friends;
- friends = TREE_CHAIN (friends))
- {
- tree fn = TREE_VALUE (friends);
-
- /* Only interested in global functions with potentially hidden
- (i.e. unqualified) declarations. */
- if (CP_DECL_CONTEXT (fn) != context)
- continue;
-
- /* Template specializations are never found by name lookup.
- (Templates themselves can be found, but not template
- specializations.) */
- if (TREE_CODE (fn) == FUNCTION_DECL && DECL_USE_TEMPLATE (fn))
- continue;
-
- add_fns (fn);
- }
}
/* Adds the class and its bases to the lookup structure.
@@ -873,7 +1391,7 @@ name_lookup::adl_bases (tree type)
}
/* Adds everything associated with a class argument type to the lookup
- structure. Returns true on error.
+ structure.
If T is a class type (including unions), its associated classes are: the
class itself; the class of which it is a member, if any; and its direct
@@ -897,11 +1415,13 @@ name_lookup::adl_class (tree type)
return;
type = TYPE_MAIN_VARIANT (type);
+
/* We don't set found here because we have to have set seen first,
which is done in the adl_bases walk. */
if (found_p (type))
return;
+ complete_type (type);
adl_bases (type);
mark_found (type);
@@ -918,6 +1438,19 @@ name_lookup::adl_class (tree type)
}
}
+void
+name_lookup::adl_enum (tree type)
+{
+ type = TYPE_MAIN_VARIANT (type);
+ if (see_and_mark (type))
+ return;
+
+ if (TYPE_CLASS_SCOPE_P (type))
+ adl_class_only (TYPE_CONTEXT (type));
+ else
+ adl_namespace (decl_namespace_context (type));
+}
+
void
name_lookup::adl_expr (tree expr)
{
@@ -1003,9 +1536,7 @@ name_lookup::adl_type (tree type)
return;
case ENUMERAL_TYPE:
- if (TYPE_CLASS_SCOPE_P (type))
- adl_class_only (TYPE_CONTEXT (type));
- adl_namespace (decl_namespace_context (type));
+ adl_enum (type);
return;
case LANG_TYPE:
@@ -1074,10 +1605,9 @@ name_lookup::adl_template_arg (tree arg)
tree
name_lookup::search_adl (tree fns, vec<tree, va_gc> *args)
{
- deduping = true;
- lookup_mark (fns, true);
- value = fns;
-
+ gcc_checking_assert (!vec_safe_length (scopes));
+
+ /* Gather each associated entity onto the lookup's scope list. */
unsigned ix;
tree arg;
@@ -1089,7 +1619,91 @@ name_lookup::search_adl (tree fns, vec<tree, va_gc> *args)
else
adl_expr (arg);
- fns = value;
+ if (vec_safe_length (scopes))
+ {
+ /* Now do the lookups. */
+ if (fns)
+ {
+ deduping = true;
+ lookup_mark (fns, true);
+ }
+ value = fns;
+
+ /* INST_PATH will be NULL, if this is /not/ 2nd-phase ADL. */
+ bitmap inst_path = NULL;
+ /* VISIBLE is the regular import bitmap. */
+ bitmap visible = module_visible_instantiation_path (&inst_path);
+
+ for (unsigned ix = scopes->length (); ix--;)
+ {
+ tree scope = (*scopes)[ix];
+ if (TREE_CODE (scope) == NAMESPACE_DECL)
+ adl_namespace_fns (scope, visible);
+ else
+ {
+ if (RECORD_OR_UNION_TYPE_P (scope))
+ adl_class_fns (scope);
+
+ /* During 2nd phase ADL: Any exported declaration D in N
+ declared within the purview of a named module M
+ (10.2) is visible if there is an associated entity
+ attached to M with the same innermost enclosing
+ non-inline namespace as D.
+ [basic.lookup.argdep]/4.4 */
+
+ if (!inst_path)
+ /* Not 2nd phase. */
+ continue;
+
+ tree ctx = CP_DECL_CONTEXT (TYPE_NAME (scope));
+ if (TREE_CODE (ctx) != NAMESPACE_DECL)
+ /* Not namespace-scope class. */
+ continue;
+
+ tree origin = get_originating_module_decl (TYPE_NAME (scope));
+ if (!DECL_LANG_SPECIFIC (origin)
+ || !DECL_MODULE_IMPORT_P (origin))
+ /* Not imported. */
+ continue;
+
+ unsigned module = get_importing_module (origin);
+
+ if (!bitmap_bit_p (inst_path, module))
+ /* Not on path of instantiation. */
+ continue;
+
+ if (bitmap_bit_p (visible, module))
+ /* If the module was in the visible set, we'll look at
+ its namespace partition anyway. */
+ continue;
+
+ if (tree *slot = find_namespace_slot (ctx, name, false))
+ if (mc_slot *mslot = search_imported_binding_slot (slot, module))
+ {
+ if (mslot->is_lazy ())
+ lazy_load_binding (module, ctx, name, mslot);
+
+ if (tree bind = *mslot)
+ {
+ if (!deduping)
+ {
+ /* We must turn on deduping, because some
+ other class from this module might also
+ be in this namespace. */
+ deduping = true;
+ lookup_mark (value, true);
+ }
+
+ /* Add the exported fns */
+ if (STAT_HACK_P (bind))
+ add_fns (STAT_VISIBLE (bind));
+ }
+ }
+ }
+ }
+
+ fns = value;
+ }
return fns;
}
@@ -1284,6 +1898,41 @@ get_class_binding_direct (tree klass, tree name, bool want_type)
return val;
}
+/* We're about to lookup NAME in KLASS. Make sure any lazily declared
+ members are now declared. */
+
+static void
+maybe_lazily_declare (tree klass, tree name)
+{
+ tree main_decl = TYPE_NAME (TYPE_MAIN_VARIANT (klass));
+ if (DECL_LANG_SPECIFIC (main_decl)
+ && DECL_MODULE_PENDING_MEMBERS_P (main_decl))
+ lazy_load_members (main_decl);
+
+ /* Lazily declare functions, if we're going to search these. */
+ if (IDENTIFIER_CTOR_P (name))
+ {
+ if (CLASSTYPE_LAZY_DEFAULT_CTOR (klass))
+ lazily_declare_fn (sfk_constructor, klass);
+ if (CLASSTYPE_LAZY_COPY_CTOR (klass))
+ lazily_declare_fn (sfk_copy_constructor, klass);
+ if (CLASSTYPE_LAZY_MOVE_CTOR (klass))
+ lazily_declare_fn (sfk_move_constructor, klass);
+ }
+ else if (IDENTIFIER_DTOR_P (name))
+ {
+ if (CLASSTYPE_LAZY_DESTRUCTOR (klass))
+ lazily_declare_fn (sfk_destructor, klass);
+ }
+ else if (name == assign_op_identifier)
+ {
+ if (CLASSTYPE_LAZY_COPY_ASSIGN (klass))
+ lazily_declare_fn (sfk_copy_assignment, klass);
+ if (CLASSTYPE_LAZY_MOVE_ASSIGN (klass))
+ lazily_declare_fn (sfk_move_assignment, klass);
+ }
+}
+
/* Look for NAME's binding in exactly KLASS. See
get_class_binding_direct for argument description. Does lazy
special function creation as necessary. */
@@ -1294,30 +1943,7 @@ get_class_binding (tree klass, tree name, bool want_type /*=false*/)
klass = complete_type (klass);
if (COMPLETE_TYPE_P (klass))
- {
- /* Lazily declare functions, if we're going to search these. */
- if (IDENTIFIER_CTOR_P (name))
- {
- if (CLASSTYPE_LAZY_DEFAULT_CTOR (klass))
- lazily_declare_fn (sfk_constructor, klass);
- if (CLASSTYPE_LAZY_COPY_CTOR (klass))
- lazily_declare_fn (sfk_copy_constructor, klass);
- if (CLASSTYPE_LAZY_MOVE_CTOR (klass))
- lazily_declare_fn (sfk_move_constructor, klass);
- }
- else if (IDENTIFIER_DTOR_P (name))
- {
- if (CLASSTYPE_LAZY_DESTRUCTOR (klass))
- lazily_declare_fn (sfk_destructor, klass);
- }
- else if (name == assign_op_identifier)
- {
- if (CLASSTYPE_LAZY_COPY_ASSIGN (klass))
- lazily_declare_fn (sfk_copy_assignment, klass);
- if (CLASSTYPE_LAZY_MOVE_ASSIGN (klass))
- lazily_declare_fn (sfk_move_assignment, klass);
- }
- }
+ maybe_lazily_declare (klass, name);
return get_class_binding_direct (klass, name, want_type);
}
@@ -1338,14 +1964,11 @@ find_member_slot (tree klass, tree name)
vec_alloc (member_vec, 8);
CLASSTYPE_MEMBER_VEC (klass) = member_vec;
if (complete_p)
- {
- /* If the class is complete but had no member_vec, we need
- to add the TYPE_FIELDS into it. We're also most likely
- to be adding ctors & dtors, so ask for 6 spare slots (the
- abstract cdtors and their clones). */
- set_class_bindings (klass, 6);
- member_vec = CLASSTYPE_MEMBER_VEC (klass);
- }
+ /* If the class is complete but had no member_vec, we need to
+ add the TYPE_FIELDS into it. We're also most likely to be
+ adding ctors & dtors, so ask for 6 spare slots (the
+ abstract cdtors and their clones). */
+ member_vec = set_class_bindings (klass, 6);
}
if (IDENTIFIER_CONV_OP_P (name))
@@ -1665,7 +2288,11 @@ member_vec_dedup (vec<tree, va_gc> *member_vec)
if (!current)
current = to_type;
else
- current = stat_hack (current, to_type);
+ {
+ current = stat_hack (current, to_type);
+ /* Also point the chain at the decls. */
+ OVL_CHAIN (current) = STAT_DECL (current);
+ }
}
if (current)
@@ -1687,18 +2314,18 @@ member_vec_dedup (vec<tree, va_gc> *member_vec)
no existing MEMBER_VEC and fewer than 8 fields, do nothing. We
know there must be at least 1 field -- the self-reference
TYPE_DECL, except for anon aggregates, which will have at least
- one field anyway. */
+ one field anyway. If EXTRA < 0, always create the vector. */
-void
-set_class_bindings (tree klass, unsigned extra)
+vec<tree, va_gc> *
+set_class_bindings (tree klass, int extra)
{
unsigned n_fields = count_class_fields (klass);
vec<tree, va_gc> *member_vec = CLASSTYPE_MEMBER_VEC (klass);
- if (member_vec || n_fields >= 8)
+ if (member_vec || n_fields >= 8 || extra < 0)
{
/* Append the new fields. */
- vec_safe_reserve_exact (member_vec, extra + n_fields);
+ vec_safe_reserve_exact (member_vec, n_fields + (extra >= 0 ? extra : 0));
member_vec_append_class_fields (member_vec, klass);
}
@@ -1708,6 +2335,8 @@ set_class_bindings (tree klass, unsigned extra)
member_vec->qsort (member_name_cmp);
member_vec_dedup (member_vec);
}
+
+ return member_vec;
}
/* Insert lately defined enum ENUMTYPE into KLASS for the sorted case. */
@@ -1856,7 +2485,7 @@ push_binding (tree id, tree decl, cp_binding_level* level)
void
pop_local_binding (tree id, tree decl)
{
- if (id == NULL_TREE)
+ if (!id || IDENTIFIER_ANON_P (id))
/* It's easiest to write the loops that call this function without
checking whether or not the entities involved have names. We
get here for such an entity. */
@@ -2202,8 +2831,9 @@ update_binding (cp_binding_level *level, cxx_binding *binding, tree *slot,
tree to_type = old_type;
bool local_overload = false;
- gcc_assert (level->kind == sk_namespace ? !binding
+ gcc_assert (!level || level->kind == sk_namespace ? !binding
: level->kind != sk_class && !slot);
+
if (old == error_mark_node)
old = NULL_TREE;
@@ -2279,7 +2909,7 @@ update_binding (cp_binding_level *level, cxx_binding *binding, tree *slot,
warning (OPT_Wshadow, "%q#D hides constructor for %q#D",
decl, to_type);
- local_overload = old && level->kind != sk_namespace;
+ local_overload = old && level && level->kind != sk_namespace;
to_val = ovl_insert (decl, old, -int (hiding));
}
else if (old)
@@ -2290,11 +2920,8 @@ update_binding (cp_binding_level *level, cxx_binding *binding, tree *slot,
else if (TREE_CODE (old) == TYPE_DECL)
{
if (same_type_p (TREE_TYPE (old), TREE_TYPE (decl)))
- {
- /* Two type decls to the same type. Do nothing. */
- gcc_checking_assert (!hiding);
- return old;
- }
+ /* Two type decls to the same type. Do nothing. */
+ return old;
else
goto conflict;
}
@@ -2306,7 +2933,7 @@ update_binding (cp_binding_level *level, cxx_binding *binding, tree *slot,
goto conflict;
/* The new one must be an alias at this point. */
- gcc_assert (DECL_NAMESPACE_ALIAS (decl) && !hiding);
+ gcc_assert (DECL_NAMESPACE_ALIAS (decl));
return old;
}
else if (TREE_CODE (old) == VAR_DECL)
@@ -2342,7 +2969,13 @@ update_binding (cp_binding_level *level, cxx_binding *binding, tree *slot,
update_local_overload (binding, to_val);
}
else
- add_decl_to_level (level, decl);
+ {
+ /* Don't add namespaces here. They're done in
+ push_namespace. */
+ if (level && (TREE_CODE (decl) != NAMESPACE_DECL
+ || DECL_NAMESPACE_ALIAS (decl)))
+ add_decl_to_level (level, decl);
+ }
if (slot)
{
@@ -2495,6 +3128,11 @@ check_local_shadow (tree decl)
if (DECL_EXTERNAL (decl))
return;
+ /* No need to do it when cloning, and with modules this can cause
+ out-of-order reading when we try and instantiate stuff. */
+ if (current_function_decl && DECL_CLONED_FUNCTION_P (current_function_decl))
+ return;
+
tree old = NULL_TREE;
cp_binding_level *old_scope = NULL;
if (cxx_binding *binding = outer_binding (DECL_NAME (decl), NULL, true))
@@ -2847,6 +3485,182 @@ push_local_extern_decl_alias (tree decl)
DECL_LOCAL_DECL_ALIAS (decl) = alias;
}
+/* NS needs to be exported, mark it and all its parents as exported. */
+
+static void
+implicitly_export_namespace (tree ns)
+{
+ while (!DECL_MODULE_EXPORT_P (ns))
+ {
+ DECL_MODULE_EXPORT_P (ns) = true;
+ ns = CP_DECL_CONTEXT (ns);
+ }
+}
+
+/* DECL has just been bound at LEVEL.  Finish up the bookkeeping.  */
+
+static void
+newbinding_bookkeeping (tree name, tree decl, cp_binding_level *level)
+{
+ if (TREE_CODE (decl) == TYPE_DECL)
+ {
+ tree type = TREE_TYPE (decl);
+
+ if (type != error_mark_node)
+ {
+ if (TYPE_NAME (type) != decl)
+ set_underlying_type (decl);
+
+ set_identifier_type_value_with_scope (name, decl, level);
+
+ if (level->kind != sk_namespace
+ && !instantiating_current_function_p ())
+ /* If this is a locally defined typedef in a function that
+ is not a template instantiation, record it to implement
+ -Wunused-local-typedefs. */
+ record_locally_defined_typedef (decl);
+ }
+ }
+ else
+ {
+ if (VAR_P (decl) && !DECL_LOCAL_DECL_P (decl))
+ maybe_register_incomplete_var (decl);
+
+ if (VAR_OR_FUNCTION_DECL_P (decl)
+ && DECL_EXTERN_C_P (decl))
+ check_extern_c_conflict (decl);
+ }
+}
+
+/* DECL is a global or module-purview entity. If it has non-internal
+ linkage, and we have a module vector, record it in the appropriate
+ slot. We have already checked for duplicates. */
+
+static void
+maybe_record_mergeable_decl (tree *slot, tree name, tree decl)
+{
+ if (TREE_CODE (*slot) != MODULE_VECTOR)
+ return;
+
+ if (!TREE_PUBLIC (CP_DECL_CONTEXT (decl)))
+ /* Member of internal namespace. */
+ return;
+
+ tree not_tmpl = STRIP_TEMPLATE (decl);
+ if ((TREE_CODE (not_tmpl) == FUNCTION_DECL
+ || TREE_CODE (not_tmpl) == VAR_DECL)
+ && DECL_THIS_STATIC (not_tmpl))
+ /* Internal linkage. */
+ return;
+
+ bool partition = named_module_p ();
+ tree *gslot = get_fixed_binding_slot
+ (slot, name, partition ? MODULE_SLOT_PARTITION : MODULE_SLOT_GLOBAL, true);
+
+ if (!partition)
+ {
+ mc_slot &orig
+ = MODULE_VECTOR_CLUSTER (*gslot, 0).slots[MODULE_SLOT_CURRENT];
+
+ if (!STAT_HACK_P (tree (orig)))
+ orig = stat_hack (tree (orig));
+
+ MODULE_BINDING_GLOBAL_P (tree (orig)) = true;
+ }
+
+ add_mergeable_namespace_entity (gslot, decl);
+}
+
+/* DECL is being pushed. Check whether it hides or ambiguates
+ something seen as an import. This include decls seen in our own
+ interface, which is OK. Also, check for merging a
+ global/partition decl. */
+
+static tree
+check_module_override (tree decl, tree mvec, bool hiding,
+ tree scope, tree name)
+{
+ bitmap imports = get_import_bitmap ();
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (mvec);
+ unsigned ix = MODULE_VECTOR_NUM_CLUSTERS (mvec);
+
+ if (MODULE_VECTOR_SLOTS_PER_CLUSTER == MODULE_SLOTS_FIXED)
+ {
+ cluster++;
+ ix--;
+ }
+
+ for (; ix--; cluster++)
+ for (unsigned jx = 0; jx != MODULE_VECTOR_SLOTS_PER_CLUSTER; jx++)
+ {
+ /* Are we importing this module? */
+ if (cluster->indices[jx].span != 1)
+ continue;
+ if (!cluster->indices[jx].base)
+ continue;
+ if (!bitmap_bit_p (imports, cluster->indices[jx].base))
+ continue;
+ /* Is it loaded? */
+ if (cluster->slots[jx].is_lazy ())
+ {
+ gcc_assert (cluster->indices[jx].span == 1);
+ lazy_load_binding (cluster->indices[jx].base,
+ scope, name, &cluster->slots[jx]);
+ }
+ tree bind = cluster->slots[jx];
+ if (!bind)
+ /* Errors could cause there to be nothing. */
+ continue;
+
+ if (STAT_HACK_P (bind))
+ /* We do not have to check STAT_TYPE here, the xref_tag
+ machinery deals with that problem. */
+ bind = STAT_VISIBLE (bind);
+
+ for (ovl_iterator iter (bind); iter; ++iter)
+ if (iter.using_p ())
+ ;
+ else if (tree match = duplicate_decls (decl, *iter, hiding))
+ {
+ if (TREE_CODE (match) == TYPE_DECL)
+ /* The IDENTIFIER will have the type referring to the
+ now-smashed TYPE_DECL, because ...? Reset it. */
+ SET_IDENTIFIER_TYPE_VALUE (name, TREE_TYPE (match));
+
+ return match;
+ }
+ }
+
+ if (TREE_PUBLIC (scope) && TREE_PUBLIC (decl) && !not_module_p ()
+ /* Namespaces are dealt with specially in
+ make_namespace_finish. */
+ && !(TREE_CODE (decl) == NAMESPACE_DECL && !DECL_NAMESPACE_ALIAS (decl)))
+ {
+ /* Look in the appropriate mergeable decl slot. */
+ tree mergeable = NULL_TREE;
+ if (named_module_p ())
+ mergeable = MODULE_VECTOR_CLUSTER (mvec, MODULE_SLOT_PARTITION
+ / MODULE_VECTOR_SLOTS_PER_CLUSTER)
+ .slots[MODULE_SLOT_PARTITION % MODULE_VECTOR_SLOTS_PER_CLUSTER];
+ else
+ mergeable = MODULE_VECTOR_CLUSTER (mvec, 0).slots[MODULE_SLOT_GLOBAL];
+
+ for (ovl_iterator iter (mergeable); iter; ++iter)
+ {
+ tree match = *iter;
+
+ if (duplicate_decls (decl, match, hiding))
+ {
+ if (TREE_CODE (match) == TYPE_DECL)
+ SET_IDENTIFIER_TYPE_VALUE (name, TREE_TYPE (match));
+ return match;
+ }
+ }
+ }
+
+ return NULL_TREE;
+}
+
/* Record DECL as belonging to the current lexical scope. Check for
errors (such as an incompatible declaration for the same name
already seen in the same scope). IS_FRIEND is true if DECL is
@@ -2875,11 +3689,12 @@ do_pushdecl (tree decl, bool hiding)
/* An anonymous namespace has a NULL DECL_NAME, but we still want to
insert it. Other NULL-named decls, not so much. */
tree name = DECL_NAME (decl);
- if (name || TREE_CODE (decl) == NAMESPACE_DECL)
+ if (name ? !IDENTIFIER_ANON_P (name) : TREE_CODE (decl) == NAMESPACE_DECL)
{
cxx_binding *binding = NULL; /* Local scope binding. */
tree ns = NULL_TREE; /* Searched namespace. */
tree *slot = NULL; /* Binding slot in namespace. */
+ tree *mslot = NULL; /* Current module slot in namespace. */
tree old = NULL_TREE;
if (level->kind == sk_namespace)
@@ -2893,7 +3708,11 @@ do_pushdecl (tree decl, bool hiding)
that's where we'll be pushing anyway. */
slot = find_namespace_slot (ns, name, ns == current_namespace);
if (slot)
- old = MAYBE_STAT_DECL (*slot);
+ {
+ mslot = get_fixed_binding_slot (slot, name, MODULE_SLOT_CURRENT,
+ ns == current_namespace);
+ old = MAYBE_STAT_DECL (*mslot);
+ }
}
else
{
@@ -2908,6 +3727,10 @@ do_pushdecl (tree decl, bool hiding)
for (ovl_iterator iter (old); iter; ++iter)
if (iter.using_p ())
; /* Ignore using decls here. */
+ else if (iter.hidden_p ()
+ && DECL_LANG_SPECIFIC (*iter)
+ && DECL_MODULE_IMPORT_P (*iter))
+ ; /* An undeclared builtin imported from elsewhere. */
else if (tree match
= duplicate_decls (decl, *iter, hiding, iter.hidden_p ()))
{
@@ -2946,6 +3769,26 @@ do_pushdecl (tree decl, bool hiding)
return match;
}
+ /* Check for redeclaring an import. */
+ if (slot && *slot && TREE_CODE (*slot) == MODULE_VECTOR)
+ if (tree match
+ = check_module_override (decl, *slot, hiding, ns, name))
+ {
+ if (match == error_mark_node)
+ return match;
+
+ /* We found a decl in an interface, push it into this
+ binding. */
+ decl = update_binding (NULL, binding, mslot, old,
+ match, hiding);
+
+ if (match == decl && DECL_MODULE_EXPORT_P (decl)
+ && !DECL_MODULE_EXPORT_P (level->this_entity))
+ implicitly_export_namespace (level->this_entity);
+
+ return decl;
+ }
+
/* We are pushing a new decl. */
/* Skip a hidden builtin we failed to match already. There can
@@ -2953,104 +3796,400 @@ do_pushdecl (tree decl, bool hiding)
if (old && anticipated_builtin_p (old))
old = OVL_CHAIN (old);
- check_template_shadow (decl);
+ check_template_shadow (decl);
+
+ if (DECL_DECLARES_FUNCTION_P (decl))
+ {
+ check_default_args (decl);
+
+ if (hiding)
+ {
+ if (level->kind != sk_namespace)
+ {
+ /* In a local class, a friend function declaration must
+ find a matching decl in the innermost non-class scope.
+ [class.friend/11] */
+ error_at (DECL_SOURCE_LOCATION (decl),
+ "friend declaration %qD in local class without "
+ "prior local declaration", decl);
+ /* Don't attempt to push it. */
+ return error_mark_node;
+ }
+ }
+ }
+
+ if (level->kind != sk_namespace)
+ {
+ check_local_shadow (decl);
+
+ if (TREE_CODE (decl) == NAMESPACE_DECL)
+ /* A local namespace alias. */
+ set_identifier_type_value_with_scope (name, NULL_TREE, level);
+
+ if (!binding)
+ binding = create_local_binding (level, name);
+ }
+ else if (!slot)
+ {
+ ns = current_namespace;
+ slot = find_namespace_slot (ns, name, true);
+ mslot = get_fixed_binding_slot (slot, name, MODULE_SLOT_CURRENT, true);
+ /* Update OLD to reflect the namespace we're going to be
+ pushing into. */
+ old = MAYBE_STAT_DECL (*mslot);
+ }
+
+ old = update_binding (level, binding, mslot, old, decl, hiding);
+
+ if (old != decl)
+ /* An existing decl matched, use it. */
+ decl = old;
+ else
+ {
+ newbinding_bookkeeping (name, decl, level);
+
+ if (VAR_OR_FUNCTION_DECL_P (decl)
+ && DECL_LOCAL_DECL_P (decl)
+ && TREE_CODE (CP_DECL_CONTEXT (decl)) == NAMESPACE_DECL)
+ push_local_extern_decl_alias (decl);
+
+ if (level->kind == sk_namespace
+ && TREE_PUBLIC (level->this_entity))
+ {
+ if (TREE_CODE (decl) != CONST_DECL
+ && DECL_MODULE_EXPORT_P (decl)
+ && !DECL_MODULE_EXPORT_P (level->this_entity))
+ implicitly_export_namespace (level->this_entity);
+
+ if (!not_module_p ())
+ maybe_record_mergeable_decl (slot, name, decl);
+ }
+ }
+ }
+ else
+ add_decl_to_level (level, decl);
+
+ return decl;
+}
+
+/* Record a decl-node X as belonging to the current lexical scope.
+ It's a friend if IS_FRIEND is true -- which affects exactly where
+ we push it. */
+
+tree
+pushdecl (tree x, bool hiding)
+{
+ bool subtime = timevar_cond_start (TV_NAME_LOOKUP);
+ tree ret = do_pushdecl (x, hiding);
+ timevar_cond_stop (TV_NAME_LOOKUP, subtime);
+ return ret;
+}
+
+/* A mergeable entity is being loaded into namespace NS slot NAME.
+ Create and return the appropriate vector slot for that. Either a
+ GMF slot or a module-specific one. */
+
+tree *
+mergeable_namespace_slots (tree ns, tree name, bool is_global, tree *vec)
+{
+ tree *mslot = find_namespace_slot (ns, name, true);
+ tree *vslot = get_fixed_binding_slot
+ (mslot, name, is_global ? MODULE_SLOT_GLOBAL : MODULE_SLOT_PARTITION, true);
+
+ gcc_checking_assert (TREE_CODE (*mslot) == MODULE_VECTOR);
+ *vec = *mslot;
+
+ return vslot;
+}
+
+/* DECL is a new mergeable namespace-scope decl. Add it to the
+ mergeable entities on GSLOT. */
+
+void
+add_mergeable_namespace_entity (tree *gslot, tree decl)
+{
+ *gslot = ovl_make (decl, *gslot);
+}
+
+/* A mergeable entity of KLASS called NAME is being loaded. Return
+ the set of things it could be. All such non-as_base classes have
+ been given a member vec. */
+
+tree
+lookup_class_binding (tree klass, tree name)
+{
+ tree found = NULL_TREE;
+
+ if (!COMPLETE_TYPE_P (klass))
+ ;
+ else if (TYPE_LANG_SPECIFIC (klass))
+ {
+ vec<tree, va_gc> *member_vec = CLASSTYPE_MEMBER_VEC (klass);
+
+ found = member_vec_binary_search (member_vec, name);
+ if (IDENTIFIER_CONV_OP_P (name))
+ {
+ gcc_checking_assert (name == conv_op_identifier);
+ if (found)
+ found = OVL_CHAIN (found);
+ }
+ }
+ else
+ {
+ gcc_checking_assert (IS_FAKE_BASE_TYPE (klass)
+ || TYPE_PTRMEMFUNC_P (klass));
+ found = fields_linear_search (klass, name, false);
+ }
+
+ return found;
+}
+
+/* Given a namespace-level binding BINDING, walk it, calling CALLBACK
+ for all decls of the current module. When partitions are involved,
+ decls might be mentioned more than once. */
+
+unsigned
+walk_module_binding (tree binding, bitmap partitions,
+ bool (*callback) (tree decl, WMB_Flags, void *data),
+ void *data)
+{
+ // FIXME: We don't quite deal with using decls naming stat hack
+ // type.
+ // Also using decls exporting something from the same scope
+ tree current = binding;
+ unsigned count = 0;
+
+ if (TREE_CODE (binding) == MODULE_VECTOR)
+ current = MODULE_VECTOR_CLUSTER (binding, 0).slots[MODULE_SLOT_CURRENT];
+
+ bool decl_hidden = false;
+ if (tree type = MAYBE_STAT_TYPE (current))
+ {
+ WMB_Flags flags = WMB_None;
+ if (STAT_TYPE_HIDDEN_P (current))
+ flags = WMB_Flags (flags | WMB_Hidden);
+ count += callback (type, flags, data);
+ decl_hidden = STAT_DECL_HIDDEN_P (current);
+ }
+
+ for (ovl_iterator iter (MAYBE_STAT_DECL (current)); iter; ++iter)
+ {
+ if (iter.hidden_p ())
+ decl_hidden = true;
+ if (!(decl_hidden && DECL_UNDECLARED_BUILTIN_P (*iter)))
+ {
+ WMB_Flags flags = WMB_None;
+ if (decl_hidden)
+ flags = WMB_Flags (flags | WMB_Hidden);
+ if (iter.using_p ())
+ {
+ flags = WMB_Flags (flags | WMB_Using);
+ if (iter.exporting_p ())
+ flags = WMB_Flags (flags | WMB_Export);
+ }
+ count += callback (*iter, flags, data);
+ }
+ decl_hidden = false;
+ }
+
+ if (partitions && TREE_CODE (binding) == MODULE_VECTOR)
+ {
+ /* Process partition slots. */
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (binding);
+ unsigned ix = MODULE_VECTOR_NUM_CLUSTERS (binding);
+ if (MODULE_VECTOR_SLOTS_PER_CLUSTER == MODULE_SLOTS_FIXED)
+ {
+ ix--;
+ cluster++;
+ }
+
+ bool maybe_dups = MODULE_VECTOR_PARTITION_DUPS_P (binding);
+
+ for (; ix--; cluster++)
+ for (unsigned jx = 0; jx != MODULE_VECTOR_SLOTS_PER_CLUSTER; jx++)
+ if (!cluster->slots[jx].is_lazy ())
+ if (tree bind = cluster->slots[jx])
+ {
+ if (TREE_CODE (bind) == NAMESPACE_DECL
+ && !DECL_NAMESPACE_ALIAS (bind))
+ {
+ if (unsigned base = cluster->indices[jx].base)
+ if (unsigned span = cluster->indices[jx].span)
+ do
+ if (bitmap_bit_p (partitions, base))
+ goto found;
+ while (++base, --span);
+ /* Not a partition's namespace. */
+ continue;
+ found:
+
+ WMB_Flags flags = WMB_None;
+ if (maybe_dups)
+ flags = WMB_Flags (flags | WMB_Dups);
+ count += callback (bind, flags, data);
+ }
+ else if (STAT_HACK_P (bind) && MODULE_BINDING_PARTITION_P (bind))
+ {
+ if (tree btype = STAT_TYPE (bind))
+ {
+ WMB_Flags flags = WMB_None;
+ if (maybe_dups)
+ flags = WMB_Flags (flags | WMB_Dups);
+ if (STAT_TYPE_HIDDEN_P (bind))
+ flags = WMB_Flags (flags | WMB_Hidden);
+
+ count += callback (btype, flags, data);
+ }
+ bool hidden = STAT_DECL_HIDDEN_P (bind);
+ for (ovl_iterator iter (MAYBE_STAT_DECL (STAT_DECL (bind)));
+ iter; ++iter)
+ {
+ if (iter.hidden_p ())
+ hidden = true;
+ gcc_checking_assert
+ (!(hidden && DECL_UNDECLARED_BUILTIN_P (*iter)));
+
+ WMB_Flags flags = WMB_None;
+ if (maybe_dups)
+ flags = WMB_Flags (flags | WMB_Dups);
+ if (decl_hidden)
+ flags = WMB_Flags (flags | WMB_Hidden);
+ if (iter.using_p ())
+ {
+ flags = WMB_Flags (flags | WMB_Using);
+ if (iter.exporting_p ())
+ flags = WMB_Flags (flags | WMB_Export);
+ }
+ count += callback (*iter, flags, data);
+ hidden = false;
+ }
+ }
+ }
+ }
+
+ return count;
+}
+
+/* Imported module MOD has a binding to NS::NAME, stored in section
+ SNUM. */
+
+bool
+import_module_binding (tree ns, tree name, unsigned mod, unsigned snum)
+{
+ tree *slot = find_namespace_slot (ns, name, true);
+ mc_slot *mslot = append_imported_binding_slot (slot, name, mod);
+
+ if (mslot->is_lazy () || *mslot)
+ /* Oops, something was already there. */
+ return false;
+
+ mslot->set_lazy (snum);
+ return true;
+}
+
+/* An import of MODULE is binding NS::NAME. There should be no
+ existing binding for >= MODULE. MOD_GLOB indicates whether MODULE
+ is a header_unit (-1) or part of the current module (+1). VALUE
+ and TYPE are the value and type bindings. VISIBLE are the value
+ bindings being exported. */
+
+bool
+set_module_binding (tree ns, tree name, unsigned mod, int mod_glob,
+ tree value, tree type, tree visible)
+{
+ if (!value)
+ /* Bogus BMIs could give rise to nothing to bind. */
+ return false;
- if (DECL_DECLARES_FUNCTION_P (decl))
- {
- check_default_args (decl);
+ gcc_assert (TREE_CODE (value) != NAMESPACE_DECL
+ || DECL_NAMESPACE_ALIAS (value));
+ gcc_checking_assert (mod);
- if (hiding)
- {
- if (level->kind != sk_namespace)
- {
- /* In a local class, a friend function declaration must
- find a matching decl in the innermost non-class scope.
- [class.friend/11] */
- error_at (DECL_SOURCE_LOCATION (decl),
- "friend declaration %qD in local class without "
- "prior local declaration", decl);
- /* Don't attempt to push it. */
- return error_mark_node;
- }
- }
- }
+ tree *slot = find_namespace_slot (ns, name, true);
+ mc_slot *mslot = search_imported_binding_slot (slot, mod);
- if (level->kind != sk_namespace)
- {
- check_local_shadow (decl);
+ if (!mslot || !mslot->is_lazy ())
+ /* Again, a bogus BMI could give rise to a missing or already loaded slot. */
+ return false;
- if (TREE_CODE (decl) == NAMESPACE_DECL)
- /* A local namespace alias. */
- set_identifier_type_value_with_scope (name, NULL_TREE, level);
+ tree bind = value;
+ if (type || visible != bind || mod_glob)
+ {
+ bind = stat_hack (bind, type);
+ STAT_VISIBLE (bind) = visible;
+ if ((mod_glob > 0 && TREE_PUBLIC (ns))
+ || (type && DECL_MODULE_EXPORT_P (type)))
+ STAT_TYPE_VISIBLE_P (bind) = true;
+ }
- if (!binding)
- binding = create_local_binding (level, name);
- }
- else if (!slot)
- {
- ns = current_namespace;
- slot = find_namespace_slot (ns, name, true);
- /* Update OLD to reflect the namespace we're going to be
- pushing into. */
- old = MAYBE_STAT_DECL (*slot);
- }
+ /* Note if this is this-module or global binding. */
+ if (mod_glob > 0)
+ MODULE_BINDING_PARTITION_P (bind) = true;
+ else if (mod_glob < 0)
+ MODULE_BINDING_GLOBAL_P (bind) = true;
- old = update_binding (level, binding, slot, old, decl, hiding);
+ *mslot = bind;
- if (old != decl)
- /* An existing decl matched, use it. */
- decl = old;
- else if (TREE_CODE (decl) == TYPE_DECL)
- {
- tree type = TREE_TYPE (decl);
+ return true;
+}
- if (type != error_mark_node)
- {
- if (TYPE_NAME (type) != decl)
- set_underlying_type (decl);
+void
+note_pending_specializations (tree ns, tree name, bool is_header)
+{
+ if (tree *slot = find_namespace_slot (ns, name, false))
+ if (TREE_CODE (*slot) == MODULE_VECTOR)
+ {
+ tree vec = *slot;
+ MODULE_VECTOR_PENDING_SPECIALIZATIONS_P (vec) = true;
+ if (is_header)
+ MODULE_VECTOR_PENDING_IS_HEADER_P (vec) = true;
+ else
+ MODULE_VECTOR_PENDING_IS_PARTITION_P (vec) = true;
+ }
+}
- set_identifier_type_value_with_scope (name, decl, level);
- }
+void
+load_pending_specializations (tree ns, tree name)
+{
+ tree *slot = find_namespace_slot (ns, name, false);
- /* If this is a locally defined typedef in a function that
- is not a template instantation, record it to implement
- -Wunused-local-typedefs. */
- if (!instantiating_current_function_p ())
- record_locally_defined_typedef (decl);
- }
- else
- {
- if (VAR_P (decl) && !DECL_LOCAL_DECL_P (decl))
- maybe_register_incomplete_var (decl);
+ if (!slot || TREE_CODE (*slot) != MODULE_VECTOR
+ || !MODULE_VECTOR_PENDING_SPECIALIZATIONS_P (*slot))
+ return;
- if (VAR_OR_FUNCTION_DECL_P (decl))
- {
- if (DECL_LOCAL_DECL_P (decl)
- && TREE_CODE (CP_DECL_CONTEXT (decl)) == NAMESPACE_DECL)
- push_local_extern_decl_alias (decl);
+ tree vec = *slot;
+ MODULE_VECTOR_PENDING_SPECIALIZATIONS_P (vec) = false;
- if (DECL_EXTERN_C_P (decl))
- check_extern_c_conflict (decl);
- }
- }
+ bool do_header = MODULE_VECTOR_PENDING_IS_HEADER_P (vec);
+ bool do_partition = MODULE_VECTOR_PENDING_IS_PARTITION_P (vec);
+ MODULE_VECTOR_PENDING_IS_HEADER_P (vec) = false;
+ MODULE_VECTOR_PENDING_IS_PARTITION_P (vec) = false;
+
+ gcc_checking_assert (do_header | do_partition);
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (vec);
+ unsigned ix = MODULE_VECTOR_NUM_CLUSTERS (vec);
+ if (MODULE_VECTOR_SLOTS_PER_CLUSTER == MODULE_SLOTS_FIXED)
+ {
+ ix--;
+ cluster++;
}
- else
- add_decl_to_level (level, decl);
- return decl;
+ for (; ix--; cluster++)
+ for (unsigned jx = 0; jx != MODULE_VECTOR_SLOTS_PER_CLUSTER; jx++)
+ if (cluster->indices[jx].span
+ && cluster->slots[jx].is_lazy ()
+ && lazy_specializations_p (cluster->indices[jx].base,
+ do_header, do_partition))
+ lazy_load_binding (cluster->indices[jx].base, ns, name,
+ &cluster->slots[jx]);
}
-/* Record a decl-node X as belonging to the current lexical scope.
- It's a friend if IS_FRIEND is true -- which affects exactly where
- we push it. */
-
-tree
-pushdecl (tree x, bool hiding)
+void
+add_module_decl (tree ns, tree name, tree decl)
{
- bool subtime = timevar_cond_start (TV_NAME_LOOKUP);
- tree ret = do_pushdecl (x, hiding);
- timevar_cond_stop (TV_NAME_LOOKUP, subtime);
- return ret;
+ gcc_assert (!DECL_CHAIN (decl));
+ add_decl_to_level (NAMESPACE_LEVEL (ns), decl);
+ newbinding_bookkeeping (name, decl, NAMESPACE_LEVEL (ns));
}
/* Enter DECL into the symbol table, if that's appropriate. Returns
@@ -3654,7 +4793,7 @@ set_identifier_type_value_with_scope (tree id, tree decl, cp_binding_level *b)
else
{
gcc_assert (decl);
- if (CHECKING_P)
+ if (false && CHECKING_P)
{
tree *slot = find_namespace_slot (current_namespace, id);
gcc_checking_assert (slot
@@ -3778,7 +4917,7 @@ pushdecl_outermost_localscope (tree x)
static bool
do_nonmember_using_decl (name_lookup &lookup, bool fn_scope_p,
- tree *value_p, tree *type_p)
+ bool insert_p, tree *value_p, tree *type_p)
{
tree value = *value_p;
tree type = *type_p;
@@ -3798,13 +4937,33 @@ do_nonmember_using_decl (name_lookup &lookup, bool fn_scope_p,
lookup.value = NULL_TREE;
}
+ /* Only process exporting if we're going to be inserting. */
+ bool revealing_p = insert_p && !fn_scope_p && module_has_cmi_p ();
+
+ /* First do the value binding. */
if (!lookup.value)
- /* Nothing. */;
+ /* Nothing (only implicit typedef found). */
+ gcc_checking_assert (lookup.type);
else if (OVL_P (lookup.value) && (!value || OVL_P (value)))
{
for (lkp_iterator usings (lookup.value); usings; ++usings)
{
tree new_fn = *usings;
+ bool exporting = revealing_p && module_exporting_p ();
+ if (exporting)
+ {
+ /* If the using decl is exported, the things it refers
+ to must also be exported (or not in module purview). */
+ if (!DECL_MODULE_EXPORT_P (new_fn)
+ && (DECL_LANG_SPECIFIC (new_fn)
+ && DECL_MODULE_PURVIEW_P (new_fn)))
+ {
+ error ("%q#D does not have external linkage", new_fn);
+ inform (DECL_SOURCE_LOCATION (new_fn),
+ "%q#D declared here", new_fn);
+ exporting = false;
+ }
+ }
/* [namespace.udecl]
@@ -3812,6 +4971,10 @@ do_nonmember_using_decl (name_lookup &lookup, bool fn_scope_p,
scope has the same name and the same parameter types as a
function introduced by a using declaration the program is
ill-formed. */
+ /* This seems overreaching, asking core -- why do we care
+ about decls in the namespace that we cannot name (because
+ they are not transitively imported). We just check the
+ decls that are in this TU. */
bool found = false;
for (ovl_iterator old (value); !found && old; ++old)
{
@@ -3820,8 +4983,25 @@ do_nonmember_using_decl (name_lookup &lookup, bool fn_scope_p,
if (new_fn == old_fn)
{
/* The function already exists in the current
- namespace. */
+ namespace. We will still want to insert it if
+ it is revealing a not-revealed thing. */
found = true;
+ if (!revealing_p)
+ ;
+ else if (old.using_p ())
+ {
+ if (exporting)
+ /* Update in place. 'tis ok. */
+ OVL_EXPORT_P (old.get_using ()) = true;
+ ;
+ }
+ else if (DECL_MODULE_EXPORT_P (new_fn))
+ ;
+ else
+ {
+ value = old.remove_node (value);
+ found = false;
+ }
break;
}
else if (old.using_p ())
@@ -3845,11 +5025,11 @@ do_nonmember_using_decl (name_lookup &lookup, bool fn_scope_p,
}
}
- if (!found)
+ if (!found && insert_p)
/* Unlike the decl-pushing case we don't drop anticipated
builtins here. They don't cause a problem, and we'd
like to match them with a future declaration. */
- value = ovl_insert (new_fn, value, true);
+ value = ovl_insert (new_fn, value, 1 + exporting);
}
}
else if (value
@@ -3860,28 +5040,34 @@ do_nonmember_using_decl (name_lookup &lookup, bool fn_scope_p,
diagnose_name_conflict (lookup.value, value);
failed = true;
}
- else
+ else if (insert_p)
+ // FIXME:what if we're newly exporting lookup.value
value = lookup.value;
-
+
+ /* Now the type binding. */
if (lookup.type && lookup.type != type)
{
+ // FIXME: What if we're exporting lookup.type?
if (type && !decls_match (lookup.type, type))
{
diagnose_name_conflict (lookup.type, type);
failed = true;
}
- else
+ else if (insert_p)
type = lookup.type;
}
- /* If value is empty, shift any class or enumeration name back. */
- if (!value)
+ if (insert_p)
{
- value = type;
- type = NULL_TREE;
+ /* If value is empty, shift any class or enumeration name back. */
+ if (!value)
+ {
+ value = type;
+ type = NULL_TREE;
+ }
+ *value_p = value;
+ *type_p = type;
}
- *value_p = value;
- *type_p = type;
return failed;
}
@@ -4645,8 +5831,10 @@ do_class_using_decl (tree scope, tree name)
}
-/* Return the binding for NAME in NS. If NS is NULL, look in
- global_namespace. */
+/* Return the binding for NAME in NS in the current TU. If NS is
+ NULL, look in global_namespace. We will not find declarations
+ from imports. Users of this who, having found nothing, push a new
+ decl must be prepared for that pushing to match an existing decl. */
tree
get_namespace_binding (tree ns, tree name)
@@ -4655,7 +5843,18 @@ get_namespace_binding (tree ns, tree name)
if (!ns)
ns = global_namespace;
gcc_checking_assert (!DECL_NAMESPACE_ALIAS (ns));
- tree ret = find_namespace_value (ns, name);
+ tree ret = NULL_TREE;
+
+ if (tree *b = find_namespace_slot (ns, name))
+ {
+ ret = *b;
+
+ if (TREE_CODE (ret) == MODULE_VECTOR)
+ ret = MODULE_VECTOR_CLUSTER (ret, 0).slots[0];
+ if (ret)
+ ret = MAYBE_STAT_DECL (ret);
+ }
+
timevar_cond_stop (TV_NAME_LOOKUP, subtime);
return ret;
}
@@ -4956,6 +6155,9 @@ do_namespace_alias (tree alias, tree name_space)
DECL_NAMESPACE_ALIAS (alias) = name_space;
DECL_EXTERNAL (alias) = 1;
DECL_CONTEXT (alias) = FROB_CONTEXT (current_scope ());
+
+ set_originating_module (alias);
+
pushdecl (alias);
/* Emit debug info for namespace alias. */
@@ -5033,26 +6235,100 @@ finish_nonmember_using_decl (tree scope, tree name)
if (current_binding_level->kind == sk_namespace)
{
tree *slot = find_namespace_slot (current_namespace, name, true);
+ tree *mslot = get_fixed_binding_slot (slot, name,
+ MODULE_SLOT_CURRENT, true);
+ bool failed = false;
+
+ if (mslot != slot)
+ {
+ /* A module vector. I presume the binding list is going to
+ be sparser than the import bitmap. Hence iterate over
+ the former checking for bits set in the bitmap. */
+ bitmap imports = get_import_bitmap ();
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (*slot);
+
+ /* Scan the imported bindings. */
+ unsigned ix = MODULE_VECTOR_NUM_CLUSTERS (*slot);
+ if (MODULE_VECTOR_SLOTS_PER_CLUSTER == MODULE_SLOTS_FIXED)
+ {
+ ix--;
+ cluster++;
+ }
+
+ /* Do this in forward order, so we load modules in an order
+ the user expects. */
+ for (; ix--; cluster++)
+ for (unsigned jx = 0; jx != MODULE_VECTOR_SLOTS_PER_CLUSTER; jx++)
+ {
+ /* Are we importing this module? */
+ if (unsigned base = cluster->indices[jx].base)
+ if (unsigned span = cluster->indices[jx].span)
+ do
+ if (bitmap_bit_p (imports, base))
+ goto found;
+ while (++base, --span);
+ continue;
+
+ found:;
+ /* Is it loaded? */
+ if (cluster->slots[jx].is_lazy ())
+ {
+ gcc_assert (cluster->indices[jx].span == 1);
+ lazy_load_binding (cluster->indices[jx].base,
+ scope, name, &cluster->slots[jx]);
+ }
+
+ tree value = cluster->slots[jx];
+ if (!value)
+ /* Load errors could mean there's nothing here. */
+ continue;
+
+ /* Extract what we can see from here. If there's no
+ stat_hack, then everything was exported. */
+ tree type = NULL_TREE;
- tree value = MAYBE_STAT_DECL (*slot);
- tree type = MAYBE_STAT_TYPE (*slot);
+ /* If no stat hack, everything is visible. */
+ if (STAT_HACK_P (value))
+ {
+ if (STAT_TYPE_VISIBLE_P (value))
+ type = STAT_TYPE (value);
+ value = STAT_VISIBLE (value);
+ }
- do_nonmember_using_decl (lookup, false, &value, &type);
+ if (do_nonmember_using_decl (lookup, false, false,
+ &value, &type))
+ {
+ failed = true;
+ break;
+ }
+ }
+ }
- if (STAT_HACK_P (*slot))
+ if (!failed)
{
- STAT_DECL (*slot) = value;
- STAT_TYPE (*slot) = type;
+ /* Now do the current slot. */
+ tree value = MAYBE_STAT_DECL (*mslot);
+ tree type = MAYBE_STAT_TYPE (*mslot);
+
+ do_nonmember_using_decl (lookup, false, true, &value, &type);
+
+ // FIXME: Partition mergeableness?
+ if (STAT_HACK_P (*mslot))
+ {
+ STAT_DECL (*mslot) = value;
+ STAT_TYPE (*mslot) = type;
+ }
+ else if (type)
+ *mslot = stat_hack (value, type);
+ else
+ *mslot = value;
}
- else if (type)
- *slot = stat_hack (value, type);
- else
- *slot = value;
}
else
{
tree using_decl = build_lang_decl (USING_DECL, lookup.name, NULL_TREE);
USING_DECL_SCOPE (using_decl) = scope;
+ DECL_CONTEXT (using_decl) = current_function_decl;
add_decl_expr (using_decl);
cxx_binding *binding = find_local_binding (current_binding_level, name);
@@ -5067,7 +6343,7 @@ finish_nonmember_using_decl (tree scope, tree name)
/* DR 36 questions why using-decls at function scope may not be
duplicates. Disallow it, as C++11 claimed and PR 20420
implemented. */
- do_nonmember_using_decl (lookup, true, &value, &type);
+ do_nonmember_using_decl (lookup, true, true, &value, &type);
if (!value)
;
@@ -5719,7 +6995,7 @@ get_std_name_hint (const char *name)
/* Describe DIALECT. */
-static const char *
+const char *
get_cxx_dialect_name (enum cxx_dialect dialect)
{
switch (dialect)
@@ -5885,7 +7161,14 @@ lookup_qualified_name (tree scope, tree name, LOOK_want want, bool complain)
name_lookup lookup (name, want);
if (qualified_namespace_lookup (scope, &lookup))
- t = lookup.value;
+ {
+ t = lookup.value;
+
+ /* If we have a known type overload, pull it out. This can happen
+ for using decls. */
+ if (TREE_CODE (t) == OVERLOAD && TREE_TYPE (t) != unknown_type_node)
+ t = OVL_FUNCTION (t);
+ }
}
else if (cxx_dialect != cxx98 && TREE_CODE (scope) == ENUMERAL_TYPE)
t = lookup_enumerator (scope, name);
@@ -5983,9 +7266,10 @@ maybe_add_fuzzy_decl (auto_vec<tree> &vec, tree decl)
}
/* Examing the namespace binding BINDING, and add at most one instance
- of the name, if it contains a visible entity of interest. */
+ of the name, if it contains a visible entity of interest. Return
+ true if we added something. */
-void
+bool
maybe_add_fuzzy_binding (auto_vec<tree> &vec, tree binding,
lookup_name_fuzzy_kind kind)
{
@@ -5997,7 +7281,7 @@ maybe_add_fuzzy_binding (auto_vec<tree> &vec, tree binding,
&& STAT_TYPE (binding))
{
if (maybe_add_fuzzy_decl (vec, STAT_TYPE (binding)))
- return;
+ return true;
}
else if (!STAT_DECL_HIDDEN_P (binding))
value = STAT_DECL (binding);
@@ -6012,8 +7296,11 @@ maybe_add_fuzzy_binding (auto_vec<tree> &vec, tree binding,
if (kind != FUZZY_LOOKUP_TYPENAME
|| TREE_CODE (STRIP_TEMPLATE (value)) == TYPE_DECL)
if (maybe_add_fuzzy_decl (vec, value))
- return;
+ return true;
}
+
+ /* Nothing found. */
+ return false;
}
/* Helper function for lookup_name_fuzzy.
@@ -6079,8 +7366,54 @@ consider_binding_level (tree name, best_match <tree, const char *> &bm,
(DECL_NAMESPACE_BINDINGS (ns)->end ());
for (hash_table<named_decl_hash>::iterator iter
(DECL_NAMESPACE_BINDINGS (ns)->begin ()); iter != end; ++iter)
- maybe_add_fuzzy_binding (vec, *iter, kind);
+ {
+ tree binding = *iter;
+
+ if (TREE_CODE (binding) == MODULE_VECTOR)
+ {
+ bitmap imports = get_import_bitmap ();
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (binding);
+
+ if (tree bind = cluster->slots[MODULE_SLOT_CURRENT])
+ if (maybe_add_fuzzy_binding (vec, bind, kind))
+ continue;
+ /* Scan the imported bindings. */
+ unsigned ix = MODULE_VECTOR_NUM_CLUSTERS (binding);
+ if (MODULE_VECTOR_SLOTS_PER_CLUSTER == MODULE_SLOTS_FIXED)
+ {
+ ix--;
+ cluster++;
+ }
+
+ for (; ix--; cluster++)
+ for (unsigned jx = 0; jx != MODULE_VECTOR_SLOTS_PER_CLUSTER;
+ jx++)
+ {
+ /* Are we importing this module? */
+ if (unsigned base = cluster->indices[jx].base)
+ if (unsigned span = cluster->indices[jx].span)
+ do
+ if (bitmap_bit_p (imports, base))
+ goto found;
+ while (++base, --span);
+ continue;
+
+ found:;
+ /* Is it loaded? */
+ if (cluster->slots[jx].is_lazy ())
+ /* Let's not read in everything on the first
+ spello! */
+ continue;
+ if (tree bind = cluster->slots[jx])
+ if (maybe_add_fuzzy_binding (vec, bind, kind))
+ break;
+ }
+ }
+ else
+ maybe_add_fuzzy_binding (vec, binding, kind);
+ }
+
vec.qsort ([] (const void *a_, const void *b_)
{
return strcmp (IDENTIFIER_POINTER (*(const tree *)a_),
@@ -6515,9 +7848,10 @@ lookup_name_1 (tree name, LOOK_where where, LOOK_want want)
found:;
- /* If we have a single function from a using decl, pull it out. */
- if (val && TREE_CODE (val) == OVERLOAD && !really_overloaded_fn (val))
- val = OVL_FUNCTION (val);
+ /* If we have a known type overload, pull it out. This can happen
+ for both using decls and unhidden functions. */
+ if (val && TREE_CODE (val) == OVERLOAD && TREE_TYPE (val) != unknown_type_node)
+ val = OVL_FIRST (val);
return val;
}
@@ -6622,31 +7956,115 @@ lookup_elaborated_type_1 (tree name, TAG_how how)
tree ns = b->this_entity;
if (tree *slot = find_namespace_slot (ns, name))
{
- /* If this is the kind of thing we're looking for, we're done. */
- if (tree type = MAYBE_STAT_TYPE (*slot))
- {
- if (how != TAG_how::HIDDEN_FRIEND)
- /* No longer hidden. */
- STAT_TYPE_HIDDEN_P (*slot) = false;
+ tree bind = *slot;
+ if (TREE_CODE (bind) == MODULE_VECTOR)
+ bind = MODULE_VECTOR_CLUSTER (bind, 0).slots[MODULE_SLOT_CURRENT];
- return type;
- }
- else if (tree decl = MAYBE_STAT_DECL (*slot))
+ if (bind)
{
- if (qualify_lookup (decl, LOOK_want::TYPE))
+ /* If this is the kind of thing we're looking for, we're done. */
+ if (tree type = MAYBE_STAT_TYPE (bind))
+ {
+ if (how != TAG_how::HIDDEN_FRIEND)
+ /* No longer hidden. */
+ STAT_TYPE_HIDDEN_P (*slot) = false;
+
+ return type;
+ }
+ else if (tree decl = MAYBE_STAT_DECL (bind))
{
- if (how != TAG_how::HIDDEN_FRIEND && STAT_HACK_P (*slot)
- && STAT_DECL_HIDDEN_P (*slot))
+ if (qualify_lookup (decl, LOOK_want::TYPE))
{
- if (STAT_TYPE (*slot))
- STAT_DECL_HIDDEN_P (*slot) = false;
- else
- /* There is no type, just remove the stat
- hack. */
- *slot = decl;
+ if (how != TAG_how::HIDDEN_FRIEND && STAT_HACK_P (bind)
+ && STAT_DECL_HIDDEN_P (bind))
+ {
+ if (STAT_TYPE (bind))
+ STAT_DECL_HIDDEN_P (bind) = false;
+ else
+ {
+ /* There is no type, just remove the stat
+ hack. */
+ if (*slot == bind)
+ *slot = decl;
+ else
+ MODULE_VECTOR_CLUSTER (bind, 0)
+ .slots[MODULE_SLOT_CURRENT] = decl;
+ }
+ }
+ return decl;
}
+ }
+ }
+
+ if (TREE_CODE (*slot) == MODULE_VECTOR)
+ {
+ /* We could be redeclaring a global module entity, (from GMF
+ or header unit), or from another partition, or
+ specializing an imported template. */
+ bitmap imports = get_import_bitmap ();
+ module_cluster *cluster = MODULE_VECTOR_CLUSTER_BASE (*slot);
+
+ /* Scan the imported bindings. */
+ unsigned ix = MODULE_VECTOR_NUM_CLUSTERS (*slot);
+ if (MODULE_VECTOR_SLOTS_PER_CLUSTER == MODULE_SLOTS_FIXED)
+ {
+ ix--;
+ cluster++;
+ }
+
+ /* Do this in forward order, so we load modules in an order
+ the user expects. */
+ for (; ix--; cluster++)
+ for (unsigned jx = 0; jx != MODULE_VECTOR_SLOTS_PER_CLUSTER; jx++)
+ {
+ /* Are we importing this module? */
+ if (unsigned base = cluster->indices[jx].base)
+ if (unsigned span = cluster->indices[jx].span)
+ do
+ if (bitmap_bit_p (imports, base))
+ goto found;
+ while (++base, --span);
+ continue;
+
+ found:;
+ /* Is it loaded? */
+ if (cluster->slots[jx].is_lazy ())
+ {
+ gcc_assert (cluster->indices[jx].span == 1);
+ lazy_load_binding (cluster->indices[jx].base,
+ ns, name, &cluster->slots[jx]);
+ }
+ tree bind = cluster->slots[jx];
+ if (!bind)
+ /* Load errors could mean there's nothing here. */
+ continue;
+
+ /* Extract what we can see from here. If there's no
+ stat_hack, then everything was exported. */
+ tree type = NULL_TREE;
+
+ /* If no stat hack, everything is visible. */
+ if (STAT_HACK_P (bind))
+ {
+ if (STAT_TYPE_VISIBLE_P (bind))
+ type = STAT_TYPE (bind);
+ bind = STAT_VISIBLE (bind);
+ }
+
+ if (type && qualify_lookup (type, LOOK_want::TYPE))
+ return type;
+
+ if (bind && qualify_lookup (bind, LOOK_want::TYPE))
+ return bind;
+ }
- return decl;
+ if (!module_purview_p ())
+ {
+ /* We're in the global module, perhaps there's a tag
+ there? */
+ // FIXME: This isn't quite right, if we find something
+ // here, from the language PoV we're not supposed to
+ // know it?
}
}
}
@@ -6778,7 +8196,6 @@ do_pushtag (tree name, tree type, TAG_how how)
if (identifier_type_value_1 (name) != type)
{
tree tdef;
- int in_class = 0;
tree context = TYPE_CONTEXT (type);
if (! context)
@@ -6809,13 +8226,10 @@ do_pushtag (tree name, tree type, TAG_how how)
if (!context)
context = current_namespace;
- if (b->kind == sk_class
- || (b->kind == sk_template_parms
- && b->level_chain->kind == sk_class))
- in_class = 1;
-
tdef = create_implicit_typedef (name, type);
DECL_CONTEXT (tdef) = FROB_CONTEXT (context);
+ set_originating_module (tdef);
+
decl = maybe_process_template_type_declaration
(type, how == TAG_how::HIDDEN_FRIEND, b);
if (decl == error_mark_node)
@@ -6853,9 +8267,6 @@ do_pushtag (tree name, tree type, TAG_how how)
}
}
- if (! in_class)
- set_identifier_type_value_with_scope (name, tdef, b);
-
TYPE_CONTEXT (type) = DECL_CONTEXT (decl);
/* If this is a local class, keep track of it. We need this
@@ -7140,8 +8551,6 @@ do_push_nested_namespace (tree ns)
else
{
do_push_nested_namespace (CP_DECL_CONTEXT (ns));
- gcc_checking_assert
- (find_namespace_value (current_namespace, DECL_NAME (ns)) == ns);
resume_scope (NAMESPACE_LEVEL (ns));
current_namespace = ns;
}
@@ -7163,10 +8572,10 @@ do_pop_nested_namespace (tree ns)
do_pop_from_top_level ();
}
-/* Add TARGET to USINGS, if it does not already exist there.
- We used to build the complete graph of usings at this point, from
- the POV of the source namespaces. Now we build that as we perform
- the unqualified search. */
+/* Add TARGET to USINGS, if it does not already exist there. We used
+ to build the complete graph of usings at this point, from the POV
+ of the source namespaces. Now we build that as we perform the
+ unqualified search. */
static void
add_using_namespace (vec<tree, va_gc> *&usings, tree target)
@@ -7273,6 +8682,85 @@ push_inline_namespaces (tree ns)
return count;
}
+/* SLOT is the (possibly empty) binding slot for NAME in CTX.
+ Reuse or create a namespace NAME. NAME is null for the anonymous
+ namespace. */
+
+static tree
+reuse_namespace (tree *slot, tree ctx, tree name)
+{
+ if (modules_p () && *slot && TREE_PUBLIC (ctx) && name)
+ {
+ /* Public namespace. Shared. */
+ tree *global_slot = slot;
+ if (TREE_CODE (*slot) == MODULE_VECTOR)
+ global_slot = get_fixed_binding_slot (slot, name,
+ MODULE_SLOT_GLOBAL, false);
+
+ for (ovl_iterator iter (*global_slot); iter; ++iter)
+ {
+ tree decl = *iter;
+
+ if (TREE_CODE (decl) == NAMESPACE_DECL && !DECL_NAMESPACE_ALIAS (decl))
+ return decl;
+ }
+ }
+ return NULL_TREE;
+}
+
+static tree
+make_namespace (tree ctx, tree name, location_t loc, bool inline_p)
+{
+ /* Create the namespace. */
+ tree ns = build_lang_decl (NAMESPACE_DECL, name, void_type_node);
+ DECL_SOURCE_LOCATION (ns) = loc;
+ SCOPE_DEPTH (ns) = SCOPE_DEPTH (ctx) + 1;
+ if (!SCOPE_DEPTH (ns))
+ /* We only allow depth 255. */
+ sorry ("cannot nest more than %d namespaces", SCOPE_DEPTH (ctx));
+ DECL_CONTEXT (ns) = FROB_CONTEXT (ctx);
+
+ if (!name)
+ /* It's possible we'll need to give anon-namespaces in different
+ header-unit imports distinct names. If so, I think those
+ names can be unique to this TU -- use the module index? */
+ SET_DECL_ASSEMBLER_NAME (ns, anon_identifier);
+ else if (TREE_PUBLIC (ctx))
+ TREE_PUBLIC (ns) = true;
+
+ if (inline_p)
+ DECL_NAMESPACE_INLINE_P (ns) = true;
+
+ return ns;
+}
+
+static void
+make_namespace_finish (tree ns, tree *slot, bool from_import = false)
+{
+ if (modules_p () && TREE_PUBLIC (ns) && (from_import || *slot != ns))
+ {
+ /* Merge into global slot. */
+ tree *gslot = get_fixed_binding_slot (slot, DECL_NAME (ns),
+ MODULE_SLOT_GLOBAL, true);
+ *gslot = ns;
+ }
+
+ /* NS was newly created, finish off making it. */
+ tree ctx = CP_DECL_CONTEXT (ns);
+ cp_binding_level *scope = ggc_cleared_alloc<cp_binding_level> ();
+ scope->this_entity = ns;
+ scope->more_cleanups_ok = true;
+ scope->kind = sk_namespace;
+ scope->level_chain = NAMESPACE_LEVEL (ctx);
+ NAMESPACE_LEVEL (ns) = scope;
+
+ if (DECL_NAMESPACE_INLINE_P (ns))
+ vec_safe_push (DECL_NAMESPACE_INLINEES (ctx), ns);
+
+ if (DECL_NAMESPACE_INLINE_P (ns) || !DECL_NAME (ns))
+ emit_debug_info_using_namespace (ctx, ns, true);
+}
+
/* Push into the scope of the NAME namespace. If NAME is NULL_TREE,
then we enter an anonymous namespace. If MAKE_INLINE is true, then
we create an inline namespace (it is up to the caller to check upon
@@ -7349,59 +8837,80 @@ push_namespace (tree name, bool make_inline)
}
}
- bool new_ns = false;
if (ns)
- /* DR2061. NS might be a member of an inline namespace. We
- need to push into those namespaces. */
- count += push_inline_namespaces (CP_DECL_CONTEXT (ns));
+ {
+ /* DR2061. NS might be a member of an inline namespace. We
+ need to push into those namespaces. */
+ if (modules_p ())
+ {
+ for (tree parent, ctx = ns; ctx != current_namespace;
+ ctx = parent)
+ {
+ parent = CP_DECL_CONTEXT (ctx);
+
+ tree bind = *find_namespace_slot (parent, DECL_NAME (ctx), false);
+ if (bind != ctx)
+ {
+ mc_slot &slot
+ = MODULE_VECTOR_CLUSTER (bind, 0).slots[MODULE_SLOT_CURRENT];
+ gcc_checking_assert (!(tree)slot || (tree)slot == ctx);
+ slot = ctx;
+ }
+ }
+ }
+
+ count += push_inline_namespaces (CP_DECL_CONTEXT (ns));
+ if (DECL_SOURCE_LOCATION (ns) == BUILTINS_LOCATION)
+ /* It's not builtin now. */
+ DECL_SOURCE_LOCATION (ns) = input_location;
+ }
else
{
- ns = build_lang_decl (NAMESPACE_DECL, name, void_type_node);
- SCOPE_DEPTH (ns) = SCOPE_DEPTH (current_namespace) + 1;
- if (!SCOPE_DEPTH (ns))
- /* We only allow depth 255. */
- sorry ("cannot nest more than %d namespaces",
- SCOPE_DEPTH (current_namespace));
- DECL_CONTEXT (ns) = FROB_CONTEXT (current_namespace);
- new_ns = true;
+ /* Before making a new namespace, see if we already have one in
+ the existing partitions of the current namespace. */
+ tree *slot = find_namespace_slot (current_namespace, name, false);
+ if (slot)
+ ns = reuse_namespace (slot, current_namespace, name);
+ if (!ns)
+ ns = make_namespace (current_namespace, name,
+ input_location, make_inline);
if (pushdecl (ns) == error_mark_node)
ns = NULL_TREE;
else
{
- if (!name)
- {
- SET_DECL_ASSEMBLER_NAME (ns, anon_identifier);
-
- if (!make_inline)
- add_using_namespace (current_binding_level->using_directives,
- ns);
- }
- else if (TREE_PUBLIC (current_namespace))
- TREE_PUBLIC (ns) = 1;
-
- if (make_inline)
+ /* Finish up making the namespace. */
+ add_decl_to_level (NAMESPACE_LEVEL (current_namespace), ns);
+ if (!slot)
{
- DECL_NAMESPACE_INLINE_P (ns) = true;
- vec_safe_push (DECL_NAMESPACE_INLINEES (current_namespace), ns);
+ slot = find_namespace_slot (current_namespace, name);
+ /* This should find the slot created by pushdecl. */
+ gcc_checking_assert (slot && *slot == ns);
}
+ make_namespace_finish (ns, slot);
- if (!name || make_inline)
- emit_debug_info_using_namespace (current_namespace, ns, true);
+ /* Add the anon using-directive here, we don't do it in
+ make_namespace_finish. */
+ if (!DECL_NAMESPACE_INLINE_P (ns) && !name)
+ add_using_namespace (current_binding_level->using_directives, ns);
}
}
if (ns)
{
+ /* A public namespace is exported only if explicitly marked, or
+ it contains exported entities. */
+ if (!DECL_MODULE_EXPORT_P (ns) && TREE_PUBLIC (ns)
+ && module_exporting_p ())
+ implicitly_export_namespace (ns);
+
if (make_inline && !DECL_NAMESPACE_INLINE_P (ns))
{
- error ("inline namespace must be specified at initial definition");
+ error_at (input_location,
+ "inline namespace must be specified at initial definition");
inform (DECL_SOURCE_LOCATION (ns), "%qD defined here", ns);
}
- if (new_ns)
- begin_scope (sk_namespace, ns);
- else
- resume_scope (NAMESPACE_LEVEL (ns));
+ resume_scope (NAMESPACE_LEVEL (ns));
current_namespace = ns;
count++;
}
@@ -7425,6 +8934,66 @@ pop_namespace (void)
timevar_cond_stop (TV_NAME_LOOKUP, subtime);
}
+// FIXME: Something is not correct about the VISIBLE_P handling. We
+// need to insert this namespace into
+// (a) the GLOBAL or PARTITION slot, if it is TREE_PUBLIC
+// (b) The importing module's slot (always)
+// (c) Do we need to put it in the CURRENT slot? This is the
+// confused piece.
+
+tree
+add_imported_namespace (tree ctx, tree name, unsigned origin, location_t loc,
+ bool visible_p, bool inline_p)
+{
+ gcc_checking_assert (origin);
+ tree *slot = find_namespace_slot (ctx, name, true);
+ tree decl = reuse_namespace (slot, ctx, name);
+ if (!decl)
+ {
+ decl = make_namespace (ctx, name, loc, inline_p);
+ DECL_MODULE_IMPORT_P (decl) = true;
+ make_namespace_finish (decl, slot, true);
+ }
+ else if (DECL_NAMESPACE_INLINE_P (decl) != inline_p)
+ {
+ error_at (loc, "%s namespace %qD conflicts with reachable definition",
+ inline_p ? "inline" : "non-inline", decl);
+ inform (DECL_SOURCE_LOCATION (decl), "reachable %s definition here",
+ inline_p ? "non-inline" : "inline");
+ }
+
+ if (TREE_PUBLIC (decl) && TREE_CODE (*slot) == MODULE_VECTOR)
+ {
+ /* See if we can extend the final slot. */
+ module_cluster *last = MODULE_VECTOR_CLUSTER_LAST (*slot);
+ gcc_checking_assert (last->indices[0].span);
+ unsigned jx = MODULE_VECTOR_SLOTS_PER_CLUSTER;
+
+ while (--jx)
+ if (last->indices[jx].span)
+ break;
+ tree final = last->slots[jx];
+ if (visible_p == !STAT_HACK_P (final)
+ && MAYBE_STAT_DECL (final) == decl
+ && last->indices[jx].base + last->indices[jx].span == origin
+ && (MODULE_VECTOR_NUM_CLUSTERS (*slot) > 1
+ || (MODULE_VECTOR_SLOTS_PER_CLUSTER > MODULE_SLOTS_FIXED
+ && jx >= MODULE_SLOTS_FIXED)))
+ {
+ last->indices[jx].span++;
+ return decl;
+ }
+ }
+
+ /* Append a new slot. */
+ tree *mslot = &(tree &)*append_imported_binding_slot (slot, name, origin);
+
+ gcc_assert (!*mslot);
+ *mslot = visible_p ? decl : stat_hack (decl, NULL_TREE);
+
+ return decl;
+}
+
/* External entry points for do_{push_to/pop_from}_top_level. */
void
@@ -7580,8 +9149,8 @@ maybe_save_operator_binding (tree e)
/* Do this for lambdas and code that will emit a CMI. In a module's
GMF we don't yet know whether there will be a CMI. */
- if (!current_lambda_expr ())
- return;
+ if (!module_has_cmi_p () && !global_purview_p () && !current_lambda_expr())
+ return;
tree fnname = ovl_op_identifier (false, TREE_CODE (e));
if (!fnname)
diff --git c/gcc/cp/name-lookup.h w/gcc/cp/name-lookup.h
index 6d18539e730..db228bdcfdc 100644
--- c/gcc/cp/name-lookup.h
+++ w/gcc/cp/name-lookup.h
@@ -68,6 +68,46 @@ struct GTY(()) cxx_saved_binding {
tree real_type_value;
};
+/* To support lazy module loading, we squirrel away a section number
+ for unloaded bindings. We rely on pointers being aligned and
+ setting the bottom bit to mark a lazy value.
+ GTY doesn't like an array of union, so have a containing struct. */
+
+struct GTY(()) mc_slot {
+ union GTY((desc ("%1.is_lazy ()"))) mc_slot_lazy {
+ tree GTY((tag ("false"))) binding;
+ } u;
+
+ operator tree & ()
+ {
+ gcc_checking_assert (!is_lazy ());
+ return u.binding;
+ }
+ mc_slot &operator= (tree t)
+ {
+ u.binding = t;
+ return *this;
+ }
+ bool is_lazy () const
+ {
+ return bool (uintptr_t (u.binding) & 1);
+ }
+ void set_lazy (unsigned snum)
+ {
+ gcc_checking_assert (!u.binding);
+ u.binding = tree (uintptr_t ((snum << 1) | 1));
+ }
+ void or_lazy (unsigned snum)
+ {
+ gcc_checking_assert (is_lazy ());
+ u.binding = tree (uintptr_t (u.binding) | (snum << 1));
+ }
+ unsigned get_lazy () const
+ {
+ gcc_checking_assert (is_lazy ());
+ return unsigned (uintptr_t (u.binding) >> 1);
+ }
+};
extern tree identifier_type_value (tree);
extern void set_identifier_type_value (tree, tree);
@@ -338,7 +378,7 @@ extern tree *find_member_slot (tree klass, tree name);
extern tree *add_member_slot (tree klass, tree name);
extern void resort_type_member_vec (void *, void *,
gt_pointer_operator, void *);
-extern void set_class_bindings (tree, unsigned extra = 0);
+extern vec<tree, va_gc> *set_class_bindings (tree, int extra = 0);
extern void insert_late_enum_def_bindings (tree, tree);
extern tree innermost_non_namespace_value (tree);
extern cxx_binding *outer_binding (tree, cxx_binding *, bool);
@@ -361,4 +401,33 @@ extern void maybe_save_operator_binding (tree);
extern void push_operator_bindings (void);
extern void discard_operator_bindings (tree);
+/* Lower level interface for modules. */
+extern tree *mergeable_namespace_slots (tree ns, tree name, bool is_global,
+ tree *mvec);
+extern void add_mergeable_namespace_entity (tree *slot, tree decl);
+extern tree lookup_class_binding (tree ctx, tree name);
+extern bool import_module_binding (tree ctx, tree name, unsigned mod,
+ unsigned snum);
+extern bool set_module_binding (tree ctx, tree name, unsigned mod,
+ int mod_glob_flag,
+ tree value, tree type, tree visible);
+extern void add_module_decl (tree ctx, tree name, tree decl);
+
+enum WMB_Flags
+{
+ WMB_None = 0,
+ WMB_Dups = 1 << 0,
+ WMB_Export = 1 << 1,
+ WMB_Using = 1 << 2,
+ WMB_Hidden = 1 << 3,
+};
+
+extern unsigned walk_module_binding (tree binding, bitmap partitions,
+ bool (*)(tree decl, WMB_Flags, void *data),
+ void *data);
+extern tree add_imported_namespace (tree ctx, tree name, unsigned module,
+ location_t, bool visible_p, bool inline_p);
+extern void note_pending_specializations (tree ns, tree name, bool is_header);
+extern void load_pending_specializations (tree ns, tree name);
+extern const char *get_cxx_dialect_name (enum cxx_dialect dialect);
#endif /* GCC_CP_NAME_LOOKUP_H */