// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Lowers the AST to the HIR. //! //! Since the AST and HIR are fairly similar, this is mostly a simple procedure, //! much like a fold. Where lowering involves a bit more work things get more //! interesting and there are some invariants you should know about. These mostly //! concern spans and ids. //! //! Spans are assigned to AST nodes during parsing and then are modified during //! expansion to indicate the origin of a node and the process it went through //! being expanded. Ids are assigned to AST nodes just before lowering. //! //! For the simpler lowering steps, ids and spans should be preserved. Unlike //! expansion we do not preserve the process of lowering in the spans, so spans //! should not be modified here. When creating a new node (as opposed to //! 'folding' an existing one), then you create a new id using `next_id()`. //! //! You must ensure that ids are unique. That means that you should only use the //! id from an AST node in a single HIR node (you can assume that AST node ids //! are unique). Every new node must have a unique id. Avoid cloning HIR nodes. //! If you do, you must then set the new node's id to a fresh one. //! //! Spans are used for error messages and for tools to map semantics back to //! source code. It is therefore not as important with spans as ids to be strict //! about use (you can't break the compiler by screwing up a span). Obviously, a //! HIR node can only have a single span. But multiple nodes can have the same //! span and spans don't need to be kept in order, etc. Where code is preserved //! by lowering, it should have the same span as in the AST. Where HIR nodes are //! 
new it is probably best to give a span for the whole AST node being lowered. //! All nodes should have real spans, don't use dummy spans. Tools are likely to //! get confused if the spans from leaf AST nodes occur in multiple places //! in the HIR, especially for multiple identifiers. use dep_graph::DepGraph; use hir::{self, ParamName}; use hir::HirVec; use hir::map::{DefKey, DefPathData, Definitions}; use hir::def_id::{DefId, DefIndex, DefIndexAddressSpace, CRATE_DEF_INDEX}; use hir::def::{Def, PathResolution, PerNS}; use hir::GenericArg; use lint::builtin::{self, PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES, ELIDED_LIFETIMES_IN_PATHS}; use middle::cstore::CrateStore; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::indexed_vec::IndexVec; use rustc_data_structures::thin_vec::ThinVec; use session::Session; use session::config::nightly_options; use util::common::FN_OUTPUT_NAME; use util::nodemap::{DefIdMap, NodeMap}; use std::collections::BTreeMap; use std::fmt::Debug; use std::mem; use smallvec::SmallVec; use syntax::attr; use syntax::ast; use syntax::ast::*; use syntax::errors; use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::feature_gate::{emit_feature_err, GateIssue}; use syntax::print::pprust; use syntax::ptr::P; use syntax::source_map::{self, respan, CompilerDesugaringKind, Spanned}; use syntax::std_inject; use syntax::symbol::{keywords, Symbol}; use syntax::tokenstream::{Delimited, TokenStream, TokenTree}; use syntax::parse::token::Token; use syntax::visit::{self, Visitor}; use syntax_pos::{Span, MultiSpan}; const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF; pub struct LoweringContext<'a> { crate_root: Option<&'static str>, // Use to assign ids to hir nodes that do not directly correspond to an ast node sess: &'a Session, cstore: &'a dyn CrateStore, resolver: &'a mut dyn Resolver, /// The items being lowered are collected here. 
items: BTreeMap, trait_items: BTreeMap, impl_items: BTreeMap, bodies: BTreeMap, exported_macros: Vec, trait_impls: BTreeMap>, trait_auto_impl: BTreeMap, is_generator: bool, catch_scopes: Vec, loop_scopes: Vec, is_in_loop_condition: bool, is_in_trait_impl: bool, /// What to do when we encounter either an "anonymous lifetime /// reference". The term "anonymous" is meant to encompass both /// `'_` lifetimes as well as fully elided cases where nothing is /// written at all (e.g., `&T` or `std::cell::Ref`). anonymous_lifetime_mode: AnonymousLifetimeMode, // Used to create lifetime definitions from in-band lifetime usages. // e.g. `fn foo(x: &'x u8) -> &'x u8` to `fn foo<'x>(x: &'x u8) -> &'x u8` // When a named lifetime is encountered in a function or impl header and // has not been defined // (i.e. it doesn't appear in the in_scope_lifetimes list), it is added // to this list. The results of this list are then added to the list of // lifetime definitions in the corresponding impl or function generics. lifetimes_to_define: Vec<(Span, ParamName)>, // Whether or not in-band lifetimes are being collected. This is used to // indicate whether or not we're in a place where new lifetimes will result // in in-band lifetime definitions, such a function or an impl header, // including implicit lifetimes from `impl_header_lifetime_elision`. is_collecting_in_band_lifetimes: bool, // Currently in-scope lifetimes defined in impl headers, fn headers, or HRTB. // When `is_collectin_in_band_lifetimes` is true, each lifetime is checked // against this list to see if it is already in-scope, or if a definition // needs to be created for it. in_scope_lifetimes: Vec, type_def_lifetime_params: DefIdMap, current_hir_id_owner: Vec<(DefIndex, u32)>, item_local_id_counters: NodeMap, node_id_to_hir_id: IndexVec, } pub trait Resolver { /// Resolve a path generated by the lowerer when expanding `for`, `if let`, etc. 
fn resolve_hir_path( &mut self, path: &ast::Path, args: Option>, is_value: bool, ) -> hir::Path; /// Obtain the resolution for a node id fn get_resolution(&mut self, id: NodeId) -> Option; /// Obtain the possible resolutions for the given `use` statement. fn get_import(&mut self, id: NodeId) -> PerNS>; /// We must keep the set of definitions up to date as we add nodes that weren't in the AST. /// This should only return `None` during testing. fn definitions(&mut self) -> &mut Definitions; /// Given suffix ["b","c","d"], creates a HIR path for `[::crate_root]::b::c::d` and resolves /// it based on `is_value`. fn resolve_str_path( &mut self, span: Span, crate_root: Option<&str>, components: &[&str], args: Option>, is_value: bool, ) -> hir::Path; } #[derive(Debug)] enum ImplTraitContext<'a> { /// Treat `impl Trait` as shorthand for a new universal generic parameter. /// Example: `fn foo(x: impl Debug)`, where `impl Debug` is conceptually /// equivalent to a fresh universal parameter like `fn foo(x: T)`. /// /// Newly generated parameters should be inserted into the given `Vec`. Universal(&'a mut Vec), /// Treat `impl Trait` as shorthand for a new existential parameter. /// Example: `fn foo() -> impl Debug`, where `impl Debug` is conceptually /// equivalent to a fresh existential parameter like `existential type T; fn foo() -> T`. /// /// We optionally store a `DefId` for the parent item here so we can look up necessary /// information later. It is `None` when no information about the context should be stored, /// e.g. for consts and statics. Existential(Option), /// `impl Trait` is not accepted in this position. Disallowed(ImplTraitPosition), } /// Position in which `impl Trait` is disallowed. Used for error reporting. 
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum ImplTraitPosition {
    Binding,
    Other,
}

impl<'a> ImplTraitContext<'a> {
    #[inline]
    fn disallowed() -> Self {
        ImplTraitContext::Disallowed(ImplTraitPosition::Other)
    }

    /// Reborrow the context for a shorter lifetime: `Universal` holds a
    /// mutable borrow, so the context itself cannot simply be copied.
    fn reborrow<'b>(&'b mut self) -> ImplTraitContext<'b> {
        use self::ImplTraitContext::*;
        match self {
            Universal(params) => Universal(params),
            Existential(did) => Existential(*did),
            Disallowed(pos) => Disallowed(*pos),
        }
    }
}

pub fn lower_crate(
    sess: &Session,
    cstore: &dyn CrateStore,
    dep_graph: &DepGraph,
    krate: &Crate,
    resolver: &mut dyn Resolver,
) -> hir::Crate {
    // We're constructing the HIR here; we don't care what we will
    // read, since we haven't even constructed the *input* to
    // incr. comp. yet.
    dep_graph.assert_ignored();

    LoweringContext {
        crate_root: std_inject::injected_crate_name(),
        sess,
        cstore,
        resolver,
        items: BTreeMap::new(),
        trait_items: BTreeMap::new(),
        impl_items: BTreeMap::new(),
        bodies: BTreeMap::new(),
        trait_impls: BTreeMap::new(),
        trait_auto_impl: BTreeMap::new(),
        exported_macros: Vec::new(),
        catch_scopes: Vec::new(),
        loop_scopes: Vec::new(),
        is_in_loop_condition: false,
        anonymous_lifetime_mode: AnonymousLifetimeMode::PassThrough,
        type_def_lifetime_params: DefIdMap(),
        current_hir_id_owner: vec![(CRATE_DEF_INDEX, 0)],
        item_local_id_counters: NodeMap(),
        node_id_to_hir_id: IndexVec::new(),
        is_generator: false,
        is_in_trait_impl: false,
        lifetimes_to_define: Vec::new(),
        is_collecting_in_band_lifetimes: false,
        in_scope_lifetimes: Vec::new(),
    }.lower_crate(krate)
}

#[derive(Copy, Clone, PartialEq)]
enum ParamMode {
    /// Any path in a type context.
    Explicit,
    /// The `module::Type` in `module::Type::method` in an expression.
    Optional,
}

#[derive(Debug)]
struct LoweredNodeId {
    node_id: NodeId,
    hir_id: hir::HirId,
}

enum ParenthesizedGenericArgs {
    Ok,
    Warn,
    Err,
}

/// What to do when we encounter an **anonymous** lifetime
/// reference. Anonymous lifetime references come in two flavors. You
/// have implicit, or fully elided, references to lifetimes, like the
/// one in `&T` or `Ref<T>`, and you have `'_` lifetimes, like `&'_ T`
/// or `Ref<'_, T>`. These often behave the same, but not always:
///
/// - certain usages of implicit references are deprecated, like
///   `Ref<T>`, and we sometimes just give hard errors in those cases
///   as well.
/// - for object bounds there is a difference: `Box<dyn Foo>` is not
///   the same as `Box<dyn Foo + '_>`.
///
/// We describe the effects of the various modes in terms of three cases:
///
/// - **Modern** -- includes all uses of `'_`, but also the lifetime arg
///   of a `&` (e.g., the missing lifetime in something like `&T`)
/// - **Dyn Bound** -- if you have something like `Box<dyn Foo>`,
///   there is an elided lifetime bound (`Box<dyn Foo + '_>`). These
///   elided bounds follow special rules. Note that this only covers
///   cases where *nothing* is written; the `'_` in `Box<dyn Foo + '_>`
///   is a case of "modern" elision.
/// - **Deprecated** -- this covers cases like `Ref<T>`, where the lifetime
///   parameter to `Ref` is completely elided. `Ref<'_, T>` would be the modern,
///   non-deprecated equivalent.
///
/// Currently, the handling of lifetime elision is somewhat spread out
/// between HIR lowering and -- as described below -- the
/// `resolve_lifetime` module. Often we "fallthrough" to that code by generating
/// an "elided" or "underscore" lifetime name. In the future, we probably want to move
/// everything into HIR lowering.
#[derive(Copy, Clone)]
enum AnonymousLifetimeMode {
    /// For **Modern** cases, create a new anonymous region parameter
    /// and reference that.
    ///
    /// For **Dyn Bound** cases, pass responsibility to
    /// `resolve_lifetime` code.
    ///
    /// For **Deprecated** cases, report an error.
    CreateParameter,

    /// Give a hard error when either `&` or `'_` is written. Used to
    /// rule out things like `where T: Foo<'_>`. Does not imply an
    /// error on default object bounds (e.g., `Box<dyn Foo>`).
    ReportError,

    /// Pass responsibility to `resolve_lifetime` code for all cases.
PassThrough, } struct ImplTraitTypeIdVisitor<'a> { ids: &'a mut SmallVec<[hir::ItemId; 1]> } impl<'a, 'b> Visitor<'a> for ImplTraitTypeIdVisitor<'b> { fn visit_ty(&mut self, ty: &'a Ty) { match ty.node { | TyKind::Typeof(_) | TyKind::BareFn(_) => return, TyKind::ImplTrait(id, _) => self.ids.push(hir::ItemId { id }), _ => {}, } visit::walk_ty(self, ty); } fn visit_path_segment( &mut self, path_span: Span, path_segment: &'v PathSegment, ) { if let Some(ref p) = path_segment.args { if let GenericArgs::Parenthesized(_) = **p { return; } } visit::walk_path_segment(self, path_span, path_segment) } } impl<'a> LoweringContext<'a> { fn lower_crate(mut self, c: &Crate) -> hir::Crate { /// Full-crate AST visitor that inserts into a fresh /// `LoweringContext` any information that may be /// needed from arbitrary locations in the crate. /// E.g. The number of lifetime generic parameters /// declared for every type and trait definition. struct MiscCollector<'lcx, 'interner: 'lcx> { lctx: &'lcx mut LoweringContext<'interner>, } impl<'lcx, 'interner> Visitor<'lcx> for MiscCollector<'lcx, 'interner> { fn visit_item(&mut self, item: &'lcx Item) { self.lctx.allocate_hir_id_counter(item.id, item); match item.node { ItemKind::Struct(_, ref generics) | ItemKind::Union(_, ref generics) | ItemKind::Enum(_, ref generics) | ItemKind::Ty(_, ref generics) | ItemKind::Existential(_, ref generics) | ItemKind::Trait(_, _, ref generics, ..) => { let def_id = self.lctx.resolver.definitions().local_def_id(item.id); let count = generics .params .iter() .filter(|param| match param.kind { ast::GenericParamKind::Lifetime { .. 
} => true, _ => false, }) .count(); self.lctx.type_def_lifetime_params.insert(def_id, count); } _ => {} } visit::walk_item(self, item); } fn visit_trait_item(&mut self, item: &'lcx TraitItem) { self.lctx.allocate_hir_id_counter(item.id, item); visit::walk_trait_item(self, item); } fn visit_impl_item(&mut self, item: &'lcx ImplItem) { self.lctx.allocate_hir_id_counter(item.id, item); visit::walk_impl_item(self, item); } } struct ItemLowerer<'lcx, 'interner: 'lcx> { lctx: &'lcx mut LoweringContext<'interner>, } impl<'lcx, 'interner> ItemLowerer<'lcx, 'interner> { fn with_trait_impl_ref(&mut self, trait_impl_ref: &Option, f: F) where F: FnOnce(&mut Self), { let old = self.lctx.is_in_trait_impl; self.lctx.is_in_trait_impl = if let &None = trait_impl_ref { false } else { true }; f(self); self.lctx.is_in_trait_impl = old; } } impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> { fn visit_item(&mut self, item: &'lcx Item) { let mut item_lowered = true; self.lctx.with_hir_id_owner(item.id, |lctx| { if let Some(hir_item) = lctx.lower_item(item) { lctx.items.insert(item.id, hir_item); } else { item_lowered = false; } }); if item_lowered { let item_generics = match self.lctx.items.get(&item.id).unwrap().node { hir::ItemKind::Impl(_, _, _, ref generics, ..) | hir::ItemKind::Trait(_, _, ref generics, ..) 
=> { generics.params.clone() } _ => HirVec::new(), }; self.lctx.with_parent_impl_lifetime_defs(&item_generics, |this| { let this = &mut ItemLowerer { lctx: this }; if let ItemKind::Impl(.., ref opt_trait_ref, _, _) = item.node { this.with_trait_impl_ref(opt_trait_ref, |this| { visit::walk_item(this, item) }); } else { visit::walk_item(this, item); } }); } } fn visit_trait_item(&mut self, item: &'lcx TraitItem) { self.lctx.with_hir_id_owner(item.id, |lctx| { let id = hir::TraitItemId { node_id: item.id }; let hir_item = lctx.lower_trait_item(item); lctx.trait_items.insert(id, hir_item); }); visit::walk_trait_item(self, item); } fn visit_impl_item(&mut self, item: &'lcx ImplItem) { self.lctx.with_hir_id_owner(item.id, |lctx| { let id = hir::ImplItemId { node_id: item.id }; let hir_item = lctx.lower_impl_item(item); lctx.impl_items.insert(id, hir_item); }); visit::walk_impl_item(self, item); } } self.lower_node_id(CRATE_NODE_ID); debug_assert!(self.node_id_to_hir_id[CRATE_NODE_ID] == hir::CRATE_HIR_ID); visit::walk_crate(&mut MiscCollector { lctx: &mut self }, c); visit::walk_crate(&mut ItemLowerer { lctx: &mut self }, c); let module = self.lower_mod(&c.module); let attrs = self.lower_attrs(&c.attrs); let body_ids = body_ids(&self.bodies); self.resolver .definitions() .init_node_id_to_hir_id_mapping(self.node_id_to_hir_id); hir::Crate { module, attrs, span: c.span, exported_macros: hir::HirVec::from(self.exported_macros), items: self.items, trait_items: self.trait_items, impl_items: self.impl_items, bodies: self.bodies, body_ids, trait_impls: self.trait_impls, trait_auto_impl: self.trait_auto_impl, } } fn allocate_hir_id_counter(&mut self, owner: NodeId, debug: &T) -> LoweredNodeId { if self.item_local_id_counters.insert(owner, 0).is_some() { bug!( "Tried to allocate item_local_id_counter for {:?} twice", debug ); } // Always allocate the first HirId for the owner itself self.lower_node_id_with_owner(owner, owner) } fn lower_node_id_generic(&mut self, ast_node_id: 
NodeId, alloc_hir_id: F) -> LoweredNodeId where F: FnOnce(&mut Self) -> hir::HirId, { if ast_node_id == DUMMY_NODE_ID { return LoweredNodeId { node_id: DUMMY_NODE_ID, hir_id: hir::DUMMY_HIR_ID, }; } let min_size = ast_node_id.as_usize() + 1; if min_size > self.node_id_to_hir_id.len() { self.node_id_to_hir_id.resize(min_size, hir::DUMMY_HIR_ID); } let existing_hir_id = self.node_id_to_hir_id[ast_node_id]; if existing_hir_id == hir::DUMMY_HIR_ID { // Generate a new HirId let hir_id = alloc_hir_id(self); self.node_id_to_hir_id[ast_node_id] = hir_id; LoweredNodeId { node_id: ast_node_id, hir_id, } } else { LoweredNodeId { node_id: ast_node_id, hir_id: existing_hir_id, } } } fn with_hir_id_owner(&mut self, owner: NodeId, f: F) -> T where F: FnOnce(&mut Self) -> T, { let counter = self.item_local_id_counters .insert(owner, HIR_ID_COUNTER_LOCKED) .unwrap_or_else(|| panic!("No item_local_id_counters entry for {:?}", owner)); let def_index = self.resolver.definitions().opt_def_index(owner).unwrap(); self.current_hir_id_owner.push((def_index, counter)); let ret = f(self); let (new_def_index, new_counter) = self.current_hir_id_owner.pop().unwrap(); debug_assert!(def_index == new_def_index); debug_assert!(new_counter >= counter); let prev = self.item_local_id_counters .insert(owner, new_counter) .unwrap(); debug_assert!(prev == HIR_ID_COUNTER_LOCKED); ret } /// This method allocates a new HirId for the given NodeId and stores it in /// the LoweringContext's NodeId => HirId map. /// Take care not to call this method if the resulting HirId is then not /// actually used in the HIR, as that would trigger an assertion in the /// HirIdValidator later on, which makes sure that all NodeIds got mapped /// properly. Calling the method twice with the same NodeId is fine though. 
fn lower_node_id(&mut self, ast_node_id: NodeId) -> LoweredNodeId { self.lower_node_id_generic(ast_node_id, |this| { let &mut (def_index, ref mut local_id_counter) = this.current_hir_id_owner.last_mut().unwrap(); let local_id = *local_id_counter; *local_id_counter += 1; hir::HirId { owner: def_index, local_id: hir::ItemLocalId(local_id), } }) } fn lower_node_id_with_owner(&mut self, ast_node_id: NodeId, owner: NodeId) -> LoweredNodeId { self.lower_node_id_generic(ast_node_id, |this| { let local_id_counter = this .item_local_id_counters .get_mut(&owner) .expect("called lower_node_id_with_owner before allocate_hir_id_counter"); let local_id = *local_id_counter; // We want to be sure not to modify the counter in the map while it // is also on the stack. Otherwise we'll get lost updates when writing // back from the stack to the map. debug_assert!(local_id != HIR_ID_COUNTER_LOCKED); *local_id_counter += 1; let def_index = this .resolver .definitions() .opt_def_index(owner) .expect("You forgot to call `create_def_with_parent` or are lowering node ids \ that do not belong to the current owner"); hir::HirId { owner: def_index, local_id: hir::ItemLocalId(local_id), } }) } fn record_body(&mut self, value: hir::Expr, decl: Option<&FnDecl>) -> hir::BodyId { let body = hir::Body { arguments: decl.map_or(hir_vec![], |decl| { decl.inputs.iter().map(|x| self.lower_arg(x)).collect() }), is_generator: self.is_generator, value, }; let id = body.id(); self.bodies.insert(id, body); id } fn next_id(&mut self) -> LoweredNodeId { self.lower_node_id(self.sess.next_node_id()) } fn expect_full_def(&mut self, id: NodeId) -> Def { self.resolver.get_resolution(id).map_or(Def::Err, |pr| { if pr.unresolved_segments() != 0 { bug!("path not fully resolved: {:?}", pr); } pr.base_def() }) } fn expect_full_def_from_use(&mut self, id: NodeId) -> impl Iterator { self.resolver.get_import(id).present_items().map(|pr| { if pr.unresolved_segments() != 0 { bug!("path not fully resolved: {:?}", pr); } 
pr.base_def() }) } fn diagnostic(&self) -> &errors::Handler { self.sess.diagnostic() } fn str_to_ident(&self, s: &'static str) -> Ident { Ident::with_empty_ctxt(Symbol::gensym(s)) } fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span { let mark = Mark::fresh(Mark::root()); mark.set_expn_info(source_map::ExpnInfo { call_site: span, def_site: Some(span), format: source_map::CompilerDesugaring(reason), allow_internal_unstable: true, allow_internal_unsafe: false, local_inner_macros: false, edition: source_map::hygiene::default_edition(), }); span.with_ctxt(SyntaxContext::empty().apply_mark(mark)) } fn with_anonymous_lifetime_mode( &mut self, anonymous_lifetime_mode: AnonymousLifetimeMode, op: impl FnOnce(&mut Self) -> R, ) -> R { let old_anonymous_lifetime_mode = self.anonymous_lifetime_mode; self.anonymous_lifetime_mode = anonymous_lifetime_mode; let result = op(self); self.anonymous_lifetime_mode = old_anonymous_lifetime_mode; result } /// Creates a new hir::GenericParam for every new lifetime and /// type parameter encountered while evaluating `f`. Definitions /// are created with the parent provided. If no `parent_id` is /// provided, no definitions will be returned. /// /// Presuming that in-band lifetimes are enabled, then /// `self.anonymous_lifetime_mode` will be updated to match the /// argument while `f` is running (and restored afterwards). 
fn collect_in_band_defs( &mut self, parent_id: DefId, anonymous_lifetime_mode: AnonymousLifetimeMode, f: F, ) -> (Vec, T) where F: FnOnce(&mut LoweringContext<'_>) -> (Vec, T), { assert!(!self.is_collecting_in_band_lifetimes); assert!(self.lifetimes_to_define.is_empty()); let old_anonymous_lifetime_mode = self.anonymous_lifetime_mode; self.anonymous_lifetime_mode = anonymous_lifetime_mode; self.is_collecting_in_band_lifetimes = true; let (in_band_ty_params, res) = f(self); self.is_collecting_in_band_lifetimes = false; self.anonymous_lifetime_mode = old_anonymous_lifetime_mode; let lifetimes_to_define = self.lifetimes_to_define.split_off(0); let params = lifetimes_to_define .into_iter() .map(|(span, hir_name)| { let def_node_id = self.next_id().node_id; // Get the name we'll use to make the def-path. Note // that collisions are ok here and this shouldn't // really show up for end-user. let (str_name, kind) = match hir_name { ParamName::Plain(ident) => ( ident.as_interned_str(), hir::LifetimeParamKind::InBand, ), ParamName::Fresh(_) => ( keywords::UnderscoreLifetime.name().as_interned_str(), hir::LifetimeParamKind::Elided, ), ParamName::Error => ( keywords::UnderscoreLifetime.name().as_interned_str(), hir::LifetimeParamKind::Error, ), }; // Add a definition for the in-band lifetime def self.resolver.definitions().create_def_with_parent( parent_id.index, def_node_id, DefPathData::LifetimeParam(str_name), DefIndexAddressSpace::High, Mark::root(), span, ); hir::GenericParam { id: def_node_id, name: hir_name, attrs: hir_vec![], bounds: hir_vec![], span, pure_wrt_drop: false, kind: hir::GenericParamKind::Lifetime { kind } } }) .chain(in_band_ty_params.into_iter()) .collect(); (params, res) } /// When there is a reference to some lifetime `'a`, and in-band /// lifetimes are enabled, then we want to push that lifetime into /// the vector of names to define later. In that case, it will get /// added to the appropriate generics. 
fn maybe_collect_in_band_lifetime(&mut self, ident: Ident) { if !self.is_collecting_in_band_lifetimes { return; } if !self.sess.features_untracked().in_band_lifetimes { return; } if self.in_scope_lifetimes.contains(&ident.modern()) { return; } let hir_name = ParamName::Plain(ident); if self.lifetimes_to_define.iter() .any(|(_, lt_name)| lt_name.modern() == hir_name.modern()) { return; } self.lifetimes_to_define.push((ident.span, hir_name)); } /// When we have either an elided or `'_` lifetime in an impl /// header, we convert it to an in-band lifetime. fn collect_fresh_in_band_lifetime(&mut self, span: Span) -> ParamName { assert!(self.is_collecting_in_band_lifetimes); let index = self.lifetimes_to_define.len(); let hir_name = ParamName::Fresh(index); self.lifetimes_to_define.push((span, hir_name)); hir_name } // Evaluates `f` with the lifetimes in `params` in-scope. // This is used to track which lifetimes have already been defined, and // which are new in-band lifetimes that need to have a definition created // for them. fn with_in_scope_lifetime_defs(&mut self, params: &[GenericParam], f: F) -> T where F: FnOnce(&mut LoweringContext<'_>) -> T, { let old_len = self.in_scope_lifetimes.len(); let lt_def_names = params.iter().filter_map(|param| match param.kind { GenericParamKind::Lifetime { .. } => Some(param.ident.modern()), _ => None, }); self.in_scope_lifetimes.extend(lt_def_names); let res = f(self); self.in_scope_lifetimes.truncate(old_len); res } // Same as the method above, but accepts `hir::GenericParam`s // instead of `ast::GenericParam`s. // This should only be used with generics that have already had their // in-band lifetimes added. In practice, this means that this function is // only used when lowering a child item of a trait or impl. 
fn with_parent_impl_lifetime_defs(&mut self, params: &HirVec, f: F ) -> T where F: FnOnce(&mut LoweringContext<'_>) -> T, { let old_len = self.in_scope_lifetimes.len(); let lt_def_names = params.iter().filter_map(|param| match param.kind { hir::GenericParamKind::Lifetime { .. } => Some(param.name.ident().modern()), _ => None, }); self.in_scope_lifetimes.extend(lt_def_names); let res = f(self); self.in_scope_lifetimes.truncate(old_len); res } /// Appends in-band lifetime defs and argument-position `impl /// Trait` defs to the existing set of generics. /// /// Presuming that in-band lifetimes are enabled, then /// `self.anonymous_lifetime_mode` will be updated to match the /// argument while `f` is running (and restored afterwards). fn add_in_band_defs( &mut self, generics: &Generics, parent_id: DefId, anonymous_lifetime_mode: AnonymousLifetimeMode, f: F, ) -> (hir::Generics, T) where F: FnOnce(&mut LoweringContext<'_>, &mut Vec) -> T, { let (in_band_defs, (mut lowered_generics, res)) = self.with_in_scope_lifetime_defs( &generics.params, |this| { this.collect_in_band_defs(parent_id, anonymous_lifetime_mode, |this| { let mut params = Vec::new(); let generics = this.lower_generics( generics, ImplTraitContext::Universal(&mut params), ); let res = f(this, &mut params); (params, (generics, res)) }) }, ); lowered_generics.params = lowered_generics .params .iter() .cloned() .chain(in_band_defs) .collect(); (lowered_generics, res) } fn with_catch_scope(&mut self, catch_id: NodeId, f: F) -> T where F: FnOnce(&mut LoweringContext<'_>) -> T, { let len = self.catch_scopes.len(); self.catch_scopes.push(catch_id); let result = f(self); assert_eq!( len + 1, self.catch_scopes.len(), "catch scopes should be added and removed in stack order" ); self.catch_scopes.pop().unwrap(); result } fn make_async_expr( &mut self, capture_clause: CaptureBy, closure_node_id: NodeId, ret_ty: Option<&Ty>, body: impl FnOnce(&mut LoweringContext<'_>) -> hir::Expr, ) -> hir::ExprKind { let 
prev_is_generator = mem::replace(&mut self.is_generator, true); let body_expr = body(self); let span = body_expr.span; let output = match ret_ty { Some(ty) => FunctionRetTy::Ty(P(ty.clone())), None => FunctionRetTy::Default(span), }; let decl = FnDecl { inputs: vec![], output, variadic: false }; let body_id = self.record_body(body_expr, Some(&decl)); self.is_generator = prev_is_generator; let capture_clause = self.lower_capture_clause(capture_clause); let closure_hir_id = self.lower_node_id(closure_node_id).hir_id; let decl = self.lower_fn_decl(&decl, None, /* impl trait allowed */ false, None); let generator = hir::Expr { id: closure_node_id, hir_id: closure_hir_id, node: hir::ExprKind::Closure(capture_clause, decl, body_id, span, Some(hir::GeneratorMovability::Static)), span, attrs: ThinVec::new(), }; let unstable_span = self.allow_internal_unstable(CompilerDesugaringKind::Async, span); let gen_future = self.expr_std_path( unstable_span, &["future", "from_generator"], None, ThinVec::new()); hir::ExprKind::Call(P(gen_future), hir_vec![generator]) } fn lower_body(&mut self, decl: Option<&FnDecl>, f: F) -> hir::BodyId where F: FnOnce(&mut LoweringContext<'_>) -> hir::Expr, { let prev = mem::replace(&mut self.is_generator, false); let result = f(self); let r = self.record_body(result, decl); self.is_generator = prev; return r; } fn with_loop_scope(&mut self, loop_id: NodeId, f: F) -> T where F: FnOnce(&mut LoweringContext<'_>) -> T, { // We're no longer in the base loop's condition; we're in another loop. 
let was_in_loop_condition = self.is_in_loop_condition; self.is_in_loop_condition = false; let len = self.loop_scopes.len(); self.loop_scopes.push(loop_id); let result = f(self); assert_eq!( len + 1, self.loop_scopes.len(), "Loop scopes should be added and removed in stack order" ); self.loop_scopes.pop().unwrap(); self.is_in_loop_condition = was_in_loop_condition; result } fn with_loop_condition_scope(&mut self, f: F) -> T where F: FnOnce(&mut LoweringContext<'_>) -> T, { let was_in_loop_condition = self.is_in_loop_condition; self.is_in_loop_condition = true; let result = f(self); self.is_in_loop_condition = was_in_loop_condition; result } fn with_new_scopes(&mut self, f: F) -> T where F: FnOnce(&mut LoweringContext<'_>) -> T, { let was_in_loop_condition = self.is_in_loop_condition; self.is_in_loop_condition = false; let catch_scopes = mem::replace(&mut self.catch_scopes, Vec::new()); let loop_scopes = mem::replace(&mut self.loop_scopes, Vec::new()); let ret = f(self); self.catch_scopes = catch_scopes; self.loop_scopes = loop_scopes; self.is_in_loop_condition = was_in_loop_condition; ret } fn def_key(&mut self, id: DefId) -> DefKey { if id.is_local() { self.resolver.definitions().def_key(id.index) } else { self.cstore.def_key(id) } } fn lower_label(&mut self, label: Option