use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};

use rustc_ast::attr;
use rustc_ast::ptr::P as AstP;
use rustc_ast::*;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::thin_vec::ThinVec;
use rustc_errors::struct_span_err;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::definitions::DefPathData;
use rustc_session::parse::feature_err;
use rustc_span::hygiene::ExpnId;
use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::DUMMY_SP;

impl<'hir> LoweringContext<'_, 'hir> {
    /// Lowers a slice of AST expressions into an arena-allocated slice of
    /// HIR expressions.
    fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
        self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
    }

    /// Lowers a single AST expression and allocates the result in the arena.
    pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
        self.arena.alloc(self.lower_expr_mut(e))
    }

    /// Lowers an AST expression to an owned HIR expression, dispatching on
    /// `e.kind`. Most arms lower sub-expressions recursively; sugared forms
    /// (`while`, `for`, `try`, `async`, `.await`, `?`, destructuring
    /// assignment, …) are delegated to dedicated desugaring helpers.
    ///
    /// Recursion depth follows expression nesting, so the stack is grown on
    /// demand via `ensure_sufficient_stack`.
    pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
        ensure_sufficient_stack(|| {
            let kind = match e.kind {
                ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
                ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
                ExprKind::ConstBlock(ref anon_const) => {
                    let anon_const = self.lower_anon_const(anon_const);
                    hir::ExprKind::ConstBlock(anon_const)
                }
                ExprKind::Repeat(ref expr, ref count) => {
                    let expr = self.lower_expr(expr);
                    let count = self.lower_anon_const(count);
                    hir::ExprKind::Repeat(expr, count)
                }
                ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
                ExprKind::Call(ref f, ref args) => {
                    // Calls to functions the resolver flags as taking legacy
                    // const-generic arguments go through a special path that
                    // turns some positional arguments into anon consts.
                    if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
                        self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
                    } else {
                        let f = self.lower_expr(f);
                        hir::ExprKind::Call(f, self.lower_exprs(args))
                    }
                }
                ExprKind::MethodCall(ref seg, ref args, span) => {
                    let hir_seg = self.arena.alloc(self.lower_path_segment(
                        e.span,
                        seg,
                        ParamMode::Optional,
                        0,
                        ParenthesizedGenericArgs::Err,
                        ImplTraitContext::disallowed(),
                    ));
                    let args = self.lower_exprs(args);
                    hir::ExprKind::MethodCall(
                        hir_seg,
                        self.lower_span(seg.ident.span),
                        args,
                        self.lower_span(span),
                    )
                }
                ExprKind::Binary(binop, ref lhs, ref rhs) => {
                    let binop = self.lower_binop(binop);
                    let lhs = self.lower_expr(lhs);
                    let rhs = self.lower_expr(rhs);
                    hir::ExprKind::Binary(binop, lhs, rhs)
                }
                ExprKind::Unary(op, ref ohs) => {
                    let op = self.lower_unop(op);
                    let ohs = self.lower_expr(ohs);
                    hir::ExprKind::Unary(op, ohs)
                }
                ExprKind::Lit(ref l) => {
                    hir::ExprKind::Lit(respan(self.lower_span(l.span), l.kind.clone()))
                }
                ExprKind::Cast(ref expr, ref ty) => {
                    let expr = self.lower_expr(expr);
                    let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
                    hir::ExprKind::Cast(expr, ty)
                }
                ExprKind::Type(ref expr, ref ty) => {
                    let expr = self.lower_expr(expr);
                    let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
                    hir::ExprKind::Type(expr, ty)
                }
                ExprKind::AddrOf(k, m, ref ohs) => {
                    let ohs = self.lower_expr(ohs);
                    hir::ExprKind::AddrOf(k, m, ohs)
                }
                ExprKind::Let(ref pat, ref scrutinee, span) => hir::ExprKind::Let(
                    self.lower_pat(pat),
                    self.lower_expr(scrutinee),
                    self.lower_span(span),
                ),
                ExprKind::If(ref cond, ref then, ref else_opt) => {
                    self.lower_expr_if(cond, then, else_opt.as_deref())
                }
                // `while` is desugared inside a fresh loop scope; see the
                // comment above `lower_expr_while_in_loop_scope`.
                ExprKind::While(ref cond, ref body, opt_label) => {
                    self.with_loop_scope(e.id, |this| {
                        let span =
                            this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
                        this.lower_expr_while_in_loop_scope(span, cond, body, opt_label)
                    })
                }
                ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
                    hir::ExprKind::Loop(
                        this.lower_block(body, false),
                        this.lower_label(opt_label),
                        hir::LoopSource::Loop,
                        DUMMY_SP,
                    )
                }),
                ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
                ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
                    self.lower_expr(expr),
                    self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
                    hir::MatchSource::Normal,
                ),
                ExprKind::Async(capture_clause, closure_node_id, ref block) => self
                    .make_async_expr(
                        capture_clause,
                        closure_node_id,
                        None,
                        block.span,
                        hir::AsyncGeneratorKind::Block,
                        |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
                    ),
                ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
                ExprKind::Closure(
                    capture_clause,
                    asyncness,
                    movability,
                    ref decl,
                    ref body,
                    fn_decl_span,
                ) => {
                    // Async and plain closures lower through different paths.
                    if let Async::Yes { closure_id, .. } = asyncness {
                        self.lower_expr_async_closure(
                            capture_clause,
                            closure_id,
                            decl,
                            body,
                            fn_decl_span,
                        )
                    } else {
                        self.lower_expr_closure(
                            capture_clause,
                            movability,
                            decl,
                            body,
                            fn_decl_span,
                        )
                    }
                }
                ExprKind::Block(ref blk, opt_label) => {
                    let opt_label = self.lower_label(opt_label);
                    hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
                }
                ExprKind::Assign(ref el, ref er, span) => {
                    self.lower_expr_assign(el, er, span, e.span)
                }
                ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
                    self.lower_binop(op),
                    self.lower_expr(el),
                    self.lower_expr(er),
                ),
                ExprKind::Field(ref el, ident) => {
                    hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(ident))
                }
                ExprKind::Index(ref el, ref er) => {
                    hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
                }
                // `a..=b` with both endpoints present has its own lowering;
                // every other range form goes through `lower_expr_range`.
                ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
                    self.lower_expr_range_closed(e.span, e1, e2)
                }
                ExprKind::Range(ref e1, ref e2, lims) => {
                    self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
                }
                // A bare `_` in expression position is an error; emit the
                // diagnostic and lower to `Err` so compilation can continue.
                ExprKind::Underscore => {
                    self.sess
                        .struct_span_err(
                            e.span,
                            "in expressions, `_` can only be used on the left-hand side of an assignment",
                        )
                        .span_label(e.span, "`_` not allowed here")
                        .emit();
                    hir::ExprKind::Err
                }
                ExprKind::Path(ref qself, ref path) => {
                    let qpath = self.lower_qpath(
                        e.id,
                        qself,
                        path,
                        ParamMode::Optional,
                        ImplTraitContext::disallowed(),
                    );
                    hir::ExprKind::Path(qpath)
                }
                ExprKind::Break(opt_label, ref opt_expr) => {
                    let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
                    hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
                }
                ExprKind::Continue(opt_label) => {
                    hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
                }
                ExprKind::Ret(ref e) => {
                    let e = e.as_ref().map(|x| self.lower_expr(x));
                    hir::ExprKind::Ret(e)
                }
                ExprKind::InlineAsm(ref asm) => {
                    hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
                }
                ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
                ExprKind::Struct(ref se) => {
                    let rest = match &se.rest {
                        StructRest::Base(e) => Some(self.lower_expr(e)),
                        // `S { .. }` without a base expression is an error;
                        // recover with an `Err` expression as the base.
                        StructRest::Rest(sp) => {
                            self.sess
                                .struct_span_err(*sp, "base expression required after `..`")
                                .span_label(*sp, "add a base expression here")
                                .emit();
                            Some(&*self.arena.alloc(self.expr_err(*sp)))
                        }
                        StructRest::None => None,
                    };
                    hir::ExprKind::Struct(
                        self.arena.alloc(self.lower_qpath(
                            e.id,
                            &se.qself,
                            &se.path,
                            ParamMode::Optional,
                            ImplTraitContext::disallowed(),
                        )),
                        self.arena
                            .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
                        rest,
                    )
                }
                ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
                ExprKind::Err => hir::ExprKind::Err,
                ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
                // Parentheses disappear during lowering: return the inner
                // expression directly (note the early `return`, skipping the
                // shared hir_id/attr handling below).
                ExprKind::Paren(ref ex) => {
                    let mut ex = self.lower_expr_mut(ex);
                    // Include parens in span, but only if it is a super-span.
                    if e.span.contains(ex.span) {
                        ex.span = self.lower_span(e.span);
                    }
                    // Merge attributes into the inner expression.
                    if !e.attrs.is_empty() {
                        let old_attrs =
                            self.attrs.get(&ex.hir_id.local_id).map(|la| *la).unwrap_or(&[]);
                        self.attrs.insert(
                            ex.hir_id.local_id,
                            &*self.arena.alloc_from_iter(
                                e.attrs
                                    .iter()
                                    .map(|a| self.lower_attr(a))
                                    .chain(old_attrs.iter().cloned()),
                            ),
                        );
                    }
                    return ex;
                }

                // Desugar `ExprForLoop`
                // from: `[opt_ident]: for <pat> in <head> <body>`
                ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
                    return self.lower_expr_for(e, pat, head, body, opt_label);
                }
                // Macro calls must have been expanded before lowering.
                ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
            };

            let hir_id = self.lower_node_id(e.id);
            self.lower_attrs(hir_id, &e.attrs);
            hir::Expr { hir_id, kind, span: self.lower_span(e.span) }
        })
    }

    /// Maps an AST unary operator to its HIR counterpart.
    fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
        match u {
            UnOp::Deref => hir::UnOp::Deref,
            UnOp::Not => hir::UnOp::Not,
            UnOp::Neg => hir::UnOp::Neg,
        }
    }

    /// Maps an AST binary operator to its HIR counterpart, lowering its span.
    fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
        Spanned {
            node: match b.node {
                BinOpKind::Add => hir::BinOpKind::Add,
                BinOpKind::Sub => hir::BinOpKind::Sub,
                BinOpKind::Mul => hir::BinOpKind::Mul,
                BinOpKind::Div => hir::BinOpKind::Div,
                BinOpKind::Rem => hir::BinOpKind::Rem,
                BinOpKind::And => hir::BinOpKind::And,
                BinOpKind::Or => hir::BinOpKind::Or,
                BinOpKind::BitXor => hir::BinOpKind::BitXor,
                BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
                BinOpKind::BitOr => hir::BinOpKind::BitOr,
                BinOpKind::Shl => hir::BinOpKind::Shl,
                BinOpKind::Shr => hir::BinOpKind::Shr,
                BinOpKind::Eq => hir::BinOpKind::Eq,
                BinOpKind::Lt => hir::BinOpKind::Lt,
                BinOpKind::Le => hir::BinOpKind::Le,
                BinOpKind::Ne => hir::BinOpKind::Ne,
                BinOpKind::Ge => hir::BinOpKind::Ge,
                BinOpKind::Gt => hir::BinOpKind::Gt,
            },
            span: self.lower_span(b.span),
        }
    }

    /// Lowers a call to a function that takes some of its const-generic
    /// arguments positionally ("legacy" style): the arguments at
    /// `legacy_args_idx` become anonymous consts appended to the callee path
    /// as angle-bracketed generic args; the remaining arguments stay ordinary
    /// call arguments.
    fn lower_legacy_const_generics(
        &mut self,
        mut f: Expr,
        args: Vec<AstP<Expr>>,
        legacy_args_idx: &[usize],
    ) -> hir::ExprKind<'hir> {
        let path = match f.kind {
            ExprKind::Path(None, ref mut path) => path,
            // The caller only routes here for plain path callees.
            _ => unreachable!(),
        };

        // Split the arguments into const generics and normal arguments
        let mut real_args = vec![];
        let mut generic_args = vec![];
        for (idx, arg) in args.into_iter().enumerate() {
            if legacy_args_idx.contains(&idx) {
                let parent_def_id = self.current_hir_id_owner;
                let node_id = self.resolver.next_node_id();

                // Add a definition for the in-band const def.
                self.resolver.create_def(
                    parent_def_id,
                    node_id,
                    DefPathData::AnonConst,
                    ExpnId::root(),
                    arg.span,
                );

                let anon_const = AnonConst { id: node_id, value: arg };
                generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
            } else {
                real_args.push(arg);
            }
        }

        // Add generic args to the last element of the path.
        let last_segment = path.segments.last_mut().unwrap();
        assert!(last_segment.args.is_none());
        last_segment.args = Some(AstP(GenericArgs::AngleBracketed(AngleBracketedArgs {
            span: DUMMY_SP,
            args: generic_args,
        })));

        // Now lower everything as normal.
        let f = self.lower_expr(&f);
        hir::ExprKind::Call(f, self.lower_exprs(&real_args))
    }

    /// Lowers an `if` expression. The condition goes through
    /// `manage_let_cond` so that non-`let` conditions get drop-temporaries
    /// semantics; the `else` arm, if any, is lowered as an expression.
    fn lower_expr_if(
        &mut self,
        cond: &Expr,
        then: &Block,
        else_opt: Option<&Expr>,
    ) -> hir::ExprKind<'hir> {
        let lowered_cond = self.lower_expr(cond);
        let new_cond = self.manage_let_cond(lowered_cond);
        let then_expr = self.lower_block_expr(then);
        if let Some(rslt) = else_opt {
            hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), Some(self.lower_expr(rslt)))
        } else {
            hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), None)
        }
    }

    // If `cond` kind is `let`, returns `let`. Otherwise, wraps and returns `cond`
    // in a temporary block.
    fn manage_let_cond(&mut self, cond: &'hir hir::Expr<'hir>) -> &'hir hir::Expr<'hir> {
        match cond.kind {
            hir::ExprKind::Let(..) => cond,
            _ => {
                let span_block =
                    self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
                self.expr_drop_temps(span_block, cond, AttrVec::new())
            }
        }
    }

    // We desugar: `'label: while $cond $body` into:
    //
    // ```
    // 'label: loop {
    //     if { let _t = $cond; _t } {
    //         $body
    //     }
    //     else {
    //         break;
    //     }
    // }
    // ```
    //
    // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
    // to preserve drop semantics since `while $cond { ... }` does not
    // let temporaries live outside of `cond`.
    fn lower_expr_while_in_loop_scope(
        &mut self,
        span: Span,
        cond: &Expr,
        body: &Block,
        opt_label: Option<Label>,
    ) -> hir::ExprKind<'hir> {
        // The condition is lowered inside a dedicated loop-condition scope.
        let lowered_cond = self.with_loop_condition_scope(|t| t.lower_expr(cond));
        // Non-`let` conditions are wrapped to get drop-temporaries semantics.
        let new_cond = self.manage_let_cond(lowered_cond);
        let then = self.lower_block_expr(body);
        // Build the `else { break; }` arm of the desugared `if`.
        let expr_break = self.expr_break(span, ThinVec::new());
        let stmt_break = self.stmt_expr(span, expr_break);
        let else_blk = self.block_all(span, arena_vec![self; stmt_break], None);
        let else_expr = self.arena.alloc(self.expr_block(else_blk, ThinVec::new()));
        // `if <cond> { <body> } else { break; }`, wrapped in a block…
        let if_kind = hir::ExprKind::If(new_cond, self.arena.alloc(then), Some(else_expr));
        let if_expr = self.expr(span, if_kind, ThinVec::new());
        let block = self.block_expr(self.arena.alloc(if_expr));
        // …which becomes the body of the labelled `loop`. The loop's span is
        // trimmed to end where the condition ends.
        let span = self.lower_span(span.with_hi(cond.span.hi()));
        let opt_label = self.lower_label(opt_label);
        hir::ExprKind::Loop(block, opt_label, hir::LoopSource::While, span)
    }

    /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_output(<expr>) }`,
    /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_output(()) }`
    /// and save the block id to use it as a break target for desugaring of the `?` operator.
    fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
        self.with_catch_scope(body.id, |this| {
            let mut block = this.lower_block_noalloc(body, true);

            // Final expression of the block (if present) or `()` with span at the end of block
            let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
                (
                    this.mark_span_with_reason(
                        DesugaringKind::TryBlock,
                        expr.span,
                        this.allow_try_trait.clone(),
                    ),
                    expr,
                )
            } else {
                let try_span = this.mark_span_with_reason(
                    DesugaringKind::TryBlock,
                    this.sess.source_map().end_point(body.span),
                    this.allow_try_trait.clone(),
                );

                (try_span, this.expr_unit(try_span))
            };

            let ok_wrapped_span =
                this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);

            // `::std::ops::Try::from_output($tail_expr)`
            block.expr = Some(this.wrap_in_try_constructor(
                hir::LangItem::TryTraitFromOutput,
                try_span,
                tail_expr,
                ok_wrapped_span,
            ));

            hir::ExprKind::Block(this.arena.alloc(block), None)
        })
    }

    /// Wraps `expr` in a call to the given `Try`-related lang item, i.e.
    /// builds `<lang_item>(expr)` with the given spans.
    fn wrap_in_try_constructor(
        &mut self,
        lang_item: hir::LangItem,
        method_span: Span,
        expr: &'hir hir::Expr<'hir>,
        overall_span: Span,
    ) -> &'hir hir::Expr<'hir> {
        let constructor =
            self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, ThinVec::new()));
        self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
    }

    /// Lowers a single `match` arm: pattern, optional guard (an
    /// `if let` guard becomes `Guard::IfLet`, anything else `Guard::If`),
    /// attributes, and body.
    fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
        let pat = self.lower_pat(&arm.pat);
        let guard = arm.guard.as_ref().map(|cond| {
            if let ExprKind::Let(ref pat, ref scrutinee, _) = cond.kind {
                hir::Guard::IfLet(self.lower_pat(pat), self.lower_expr(scrutinee))
            } else {
                hir::Guard::If(self.lower_expr(cond))
            }
        });
        let hir_id = self.next_id();
        self.lower_attrs(hir_id, &arm.attrs);
        hir::Arm {
            hir_id,
            pat,
            guard,
            body: self.lower_expr(&arm.body),
            span: self.lower_span(arm.span),
        }
    }

    /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
    ///
    /// This results in:
    ///
    /// ```text
    /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
    ///     <body>
    /// })
    /// ```
    pub(super) fn make_async_expr(
        &mut self,
        capture_clause: CaptureBy,
        closure_node_id: NodeId,
        ret_ty: Option<AstP<Ty>>,
        span: Span,
        async_gen_kind: hir::AsyncGeneratorKind,
        body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
    ) -> hir::ExprKind<'hir> {
        let output = match ret_ty {
            Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
            None => hir::FnRetTy::DefaultReturn(self.lower_span(span)),
        };

        // Resume argument type. We let the compiler infer this to simplify the lowering. It is
        // fully constrained by `future::from_generator`.
        let input_ty = hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::Infer,
            span: self.lower_span(span),
        };

        // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
        let decl = self.arena.alloc(hir::FnDecl {
            inputs: arena_vec![self; input_ty],
            output,
            c_variadic: false,
            implicit_self: hir::ImplicitSelfKind::None,
        });

        // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
        let (pat, task_context_hid) = self.pat_ident_binding_mode(
            span,
            Ident::with_dummy_span(sym::_task_context),
            hir::BindingAnnotation::Mutable,
        );
        let param = hir::Param {
            hir_id: self.next_id(),
            pat,
            ty_span: self.lower_span(span),
            span: self.lower_span(span),
        };
        let params = arena_vec![self; param];

        let body_id = self.lower_body(move |this| {
            this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));

            // Make `task_context` visible while lowering the body, restoring
            // the previous value afterwards.
            let old_ctx = this.task_context;
            this.task_context = Some(task_context_hid);
            let res = body(this);
            this.task_context = old_ctx;
            (params, res)
        });

        // `static |_task_context| -> <ret_ty> { body }`:
        let generator_kind = hir::ExprKind::Closure(
            capture_clause,
            decl,
            body_id,
            self.lower_span(span),
            Some(hir::Movability::Static),
        );
        let generator = hir::Expr {
            hir_id: self.lower_node_id(closure_node_id),
            kind: generator_kind,
            span: self.lower_span(span),
        };

        // `future::from_generator`:
        let unstable_span =
            self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
        let gen_future =
            self.expr_lang_item_path(unstable_span, hir::LangItem::FromGenerator, ThinVec::new());

        // `future::from_generator(generator)`:
        hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
    }

    /// Desugar `<expr>.await` into:
    /// ```rust
    /// match <expr> {
    ///     mut pinned => loop {
    ///         match unsafe { ::std::future::Future::poll(
    ///             <::std::pin::Pin>::new_unchecked(&mut pinned),
    ///             ::std::future::get_context(task_context),
    ///         ) } {
    ///             ::std::task::Poll::Ready(result) => break result,
    ///             ::std::task::Poll::Pending => {}
    ///         }
    ///         task_context = yield ();
    ///     }
    /// }
    /// ```
    fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
        // `.await` outside an async generator body is an error; the lowering
        // still proceeds so later phases can run.
        match self.generator_kind {
            Some(hir::GeneratorKind::Async(_)) => {}
            Some(hir::GeneratorKind::Gen) | None => {
                let mut err = struct_span_err!(
                    self.sess,
                    await_span,
                    E0728,
                    "`await` is only allowed inside `async` functions and blocks"
                );
                err.span_label(await_span, "only allowed inside `async` functions and blocks");
                if let Some(item_sp) = self.current_item {
                    err.span_label(item_sp, "this is not `async`");
                }
                err.emit();
            }
        }
        let span = self.mark_span_with_reason(DesugaringKind::Await, await_span, None);
        let gen_future_span = self.mark_span_with_reason(
            DesugaringKind::Await,
            await_span,
            self.allow_gen_future.clone(),
        );
        let expr = self.lower_expr(expr);

        let pinned_ident = Ident::with_dummy_span(sym::pinned);
        let (pinned_pat, pinned_pat_hid) =
            self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);

        let task_context_ident = Ident::with_dummy_span(sym::_task_context);

        // unsafe {
        //     ::std::future::Future::poll(
        //         ::std::pin::Pin::new_unchecked(&mut pinned),
        //         ::std::future::get_context(task_context),
        //     )
        // }
        let poll_expr = {
            let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
            let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
            let task_context = if let Some(task_context_hid) = self.task_context {
                self.expr_ident_mut(span, task_context_ident, task_context_hid)
            } else {
                // Use of `await` outside of an async context, we cannot use `task_context` here.
                self.expr_err(span)
            };
            let new_unchecked = self.expr_call_lang_item_fn_mut(
                span,
                hir::LangItem::PinNewUnchecked,
                arena_vec![self; ref_mut_pinned],
            );
            let get_context = self.expr_call_lang_item_fn_mut(
                gen_future_span,
                hir::LangItem::GetContext,
                arena_vec![self; task_context],
            );
            let call = self.expr_call_lang_item_fn(
                span,
                hir::LangItem::FuturePoll,
                arena_vec![self; new_unchecked, get_context],
            );
            self.arena.alloc(self.expr_unsafe(call))
        };

        // `::std::task::Poll::Ready(result) => break result`
        let loop_node_id = self.resolver.next_node_id();
        let loop_hir_id = self.lower_node_id(loop_node_id);
        let ready_arm = {
            let x_ident = Ident::with_dummy_span(sym::result);
            let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
            let x_expr = self.expr_ident(span, x_ident, x_pat_hid);
            let ready_field = self.single_pat_field(span, x_pat);
            let ready_pat = self.pat_lang_item_variant(span, hir::LangItem::PollReady, ready_field);
            let break_x = self.with_loop_scope(loop_node_id, move |this| {
                let expr_break =
                    hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
                this.arena.alloc(this.expr(await_span, expr_break, ThinVec::new()))
            });
            self.arm(ready_pat, break_x)
        };

        // `::std::task::Poll::Pending => {}`
        let pending_arm = {
            let pending_pat = self.pat_lang_item_variant(span, hir::LangItem::PollPending, &[]);
            let empty_block = self.expr_block_empty(span);
            self.arm(pending_pat, empty_block)
        };

        let inner_match_stmt = {
            let match_expr = self.expr_match(
                span,
                poll_expr,
                arena_vec![self; ready_arm, pending_arm],
                hir::MatchSource::AwaitDesugar,
            );
            self.stmt_expr(span, match_expr)
        };

        // task_context = yield ();
        let yield_stmt = {
            let unit = self.expr_unit(span);
            let yield_expr = self.expr(
                span,
                hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr.hir_id) }),
                ThinVec::new(),
            );
            let yield_expr = self.arena.alloc(yield_expr);

            if let Some(task_context_hid) = self.task_context {
                let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
                let assign = self.expr(
                    span,
                    hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span)),
                    AttrVec::new(),
                );
                self.stmt_expr(span, assign)
            } else {
                // Use of `await` outside of an async context. Return `yield_expr` so that we can
                // proceed with type checking.
                self.stmt(span, hir::StmtKind::Semi(yield_expr))
            }
        };

        let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);

        // loop { .. }
        let loop_expr = self.arena.alloc(hir::Expr {
            hir_id: loop_hir_id,
            kind: hir::ExprKind::Loop(
                loop_block,
                None,
                hir::LoopSource::Loop,
                self.lower_span(span),
            ),
            span: self.lower_span(span),
        });

        // mut pinned => loop { ... }
        let pinned_arm = self.arm(pinned_pat, loop_expr);

        // match <expr> {
        //     mut pinned => loop { .. }
        // }
        hir::ExprKind::Match(expr, arena_vec![self; pinned_arm], hir::MatchSource::AwaitDesugar)
    }

    /// Lowers a non-async closure. The body is lowered in a new scope; if it
    /// turns out to be a generator body, the movability is validated via
    /// `generator_movability_for_fn`.
    fn lower_expr_closure(
        &mut self,
        capture_clause: CaptureBy,
        movability: Movability,
        decl: &FnDecl,
        body: &Expr,
        fn_decl_span: Span,
    ) -> hir::ExprKind<'hir> {
        let (body_id, generator_option) = self.with_new_scopes(move |this| {
            let prev = this.current_item;
            this.current_item = Some(fn_decl_span);
            let mut generator_kind = None;
            let body_id = this.lower_fn_body(decl, |this| {
                let e = this.lower_expr_mut(body);
                // Capture whether lowering the body discovered a generator.
                generator_kind = this.generator_kind;
                e
            });
            let generator_option =
                this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
            this.current_item = prev;
            (body_id, generator_option)
        });

        // Lower outside new scope to preserve `is_in_loop_condition`.
        let fn_decl = self.lower_fn_decl(decl, None, false, None);

        hir::ExprKind::Closure(
            capture_clause,
            fn_decl,
            body_id,
            self.lower_span(fn_decl_span),
            generator_option,
        )
    }

    /// Validates the generator-ness/movability combination of a lowered
    /// closure body and returns the movability to store in the HIR closure:
    /// `Some(movability)` for generators, `None` for plain closures.
    /// Emits E0628 for generators with more than one parameter and E0697 for
    /// `static` non-generator closures.
    fn generator_movability_for_fn(
        &mut self,
        decl: &FnDecl,
        fn_decl_span: Span,
        generator_kind: Option<hir::GeneratorKind>,
        movability: Movability,
    ) -> Option<hir::Movability> {
        match generator_kind {
            Some(hir::GeneratorKind::Gen) => {
                if decl.inputs.len() > 1 {
                    struct_span_err!(
                        self.sess,
                        fn_decl_span,
                        E0628,
                        "too many parameters for a generator (expected 0 or 1 parameters)"
                    )
                    .emit();
                }
                Some(movability)
            }
            Some(hir::GeneratorKind::Async(_)) => {
                panic!("non-`async` closure body turned `async` during lowering");
            }
            None => {
                if movability == Movability::Static {
                    struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
                        .emit();
                }
                None
            }
        }
    }

    /// Lowers an `async` closure by wrapping its body in an async block:
    /// `async |x: u8| -> X { ... }` becomes
    /// `|x: u8| future_from_generator(|| -> X { ... })`.
    fn lower_expr_async_closure(
        &mut self,
        capture_clause: CaptureBy,
        closure_id: NodeId,
        decl: &FnDecl,
        body: &Expr,
        fn_decl_span: Span,
    ) -> hir::ExprKind<'hir> {
        // The outer closure keeps the parameters but has a default return
        // type; the declared return type moves onto the inner async body.
        let outer_decl =
            FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };

        let body_id = self.with_new_scopes(|this| {
            // FIXME(cramertj): allow `async` non-`move` closures with arguments.
            if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
                struct_span_err!(
                    this.sess,
                    fn_decl_span,
                    E0708,
                    "`async` non-`move` closures with parameters are not currently supported",
                )
                .help(
                    "consider using `let` statements to manually capture \
                    variables by reference before entering an `async move` closure",
                )
                .emit();
            }

            // Transform `async |x: u8| -> X { ... }` into
            // `|x: u8| future_from_generator(|| -> X { ... })`.
            let body_id = this.lower_fn_body(&outer_decl, |this| {
                let async_ret_ty =
                    if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
                let async_body = this.make_async_expr(
                    capture_clause,
                    closure_id,
                    async_ret_ty,
                    body.span,
                    hir::AsyncGeneratorKind::Closure,
                    |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
                );
                this.expr(fn_decl_span, async_body, ThinVec::new())
            });
            body_id
        });

        // We need to lower the declaration outside the new scope, because we
        // have to conserve the state of being inside a loop condition for the
        // closure argument types.
        let fn_decl = self.lower_fn_decl(&outer_decl, None, false, None);

        hir::ExprKind::Closure(
            capture_clause,
            fn_decl,
            body_id,
            self.lower_span(fn_decl_span),
            None,
        )
    }

    /// Destructure the LHS of complex assignments.
    /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
    fn lower_expr_assign(
        &mut self,
        lhs: &Expr,
        rhs: &Expr,
        eq_sign_span: Span,
        whole_span: Span,
    ) -> hir::ExprKind<'hir> {
        // Return early in case of an ordinary assignment.
        fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
            match &lhs.kind {
                ExprKind::Array(..)
                | ExprKind::Struct(..)
                | ExprKind::Tup(..)
                | ExprKind::Underscore => false,
                // Check for tuple struct constructor.
                ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
                ExprKind::Paren(e) => {
                    match e.kind {
                        // We special-case `(..)` for consistency with patterns.
                        ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
                        _ => is_ordinary(lower_ctx, e),
                    }
                }
                _ => true,
            }
        }
        if is_ordinary(self, lhs) {
            return hir::ExprKind::Assign(
                self.lower_expr(lhs),
                self.lower_expr(rhs),
                self.lower_span(eq_sign_span),
            );
        }
        // Destructuring assignment is feature-gated; emit the gate error but
        // continue lowering so later phases can run.
        if !self.sess.features_untracked().destructuring_assignment {
            let mut err = feature_err(
                &self.sess.parse_sess,
                sym::destructuring_assignment,
                eq_sign_span,
                "destructuring assignments are unstable",
            );
            err.span_label(lhs.span, "cannot assign to this expression");
            if self.is_in_loop_condition {
                err.span_suggestion_verbose(
                    lhs.span.shrink_to_lo(),
                    "you might have meant to use pattern destructuring",
                    "let ".to_string(),
                    rustc_errors::Applicability::MachineApplicable,
                );
            }
            err.emit();
        }

        let mut assignments = vec![];

        // The LHS becomes a pattern: `(lhs1, lhs2)`.
        let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
        let rhs = self.lower_expr(rhs);

        // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
        let destructure_let = self.stmt_let_pat(
            None,
            whole_span,
            Some(rhs),
            pat,
            hir::LocalSource::AssignDesugar(self.lower_span(eq_sign_span)),
        );

        // `a = lhs1; b = lhs2;`.
        let stmts = self
            .arena
            .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));

        // Wrap everything in a block.
        hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
    }

    /// If the given expression is a path to a tuple struct, returns that path.
    /// It is not a complete check, but just tries to reject most paths early
    /// if they are not tuple structs.
    /// Type checking will take care of the full validation later.
    fn extract_tuple_struct_path<'a>(
        &mut self,
        expr: &'a Expr,
    ) -> Option<(&'a Option<QSelf>, &'a Path)> {
        if let ExprKind::Path(qself, path) = &expr.kind {
            // Does the path resolve to something disallowed in a tuple struct/variant pattern?
            if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
                if partial_res.unresolved_segments() == 0
                    && !partial_res.base_res().expected_in_tuple_struct_pat()
                {
                    return None;
                }
            }
            // Unresolved (or partially resolved) paths are optimistically accepted;
            // type checking validates them later.
            return Some((qself, path));
        }
        None
    }

    /// Convert the LHS of a destructuring assignment to a pattern.
    /// Each sub-assignment is recorded in `assignments`.
    fn destructure_assign(
        &mut self,
        lhs: &Expr,
        eq_sign_span: Span,
        assignments: &mut Vec<hir::Stmt<'hir>>,
    ) -> &'hir hir::Pat<'hir> {
        // Arena-allocating wrapper around `destructure_assign_mut`.
        self.arena.alloc(self.destructure_assign_mut(lhs, eq_sign_span, assignments))
    }

    /// Worker for [`Self::destructure_assign`]: builds the pattern by value.
    /// Recognized LHS shapes (array, tuple-struct call, struct, tuple, paren)
    /// recurse structurally; anything else becomes a fresh binding plus a
    /// recorded `lhs = binding` assignment statement.
    fn destructure_assign_mut(
        &mut self,
        lhs: &Expr,
        eq_sign_span: Span,
        assignments: &mut Vec<hir::Stmt<'hir>>,
    ) -> hir::Pat<'hir> {
        match &lhs.kind {
            // Underscore pattern.
            ExprKind::Underscore => {
                return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
            }
            // Slice patterns.
            ExprKind::Array(elements) => {
                let (pats, rest) =
                    self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
                let slice_pat = if let Some((i, span)) = rest {
                    // A `..` at index `i` splits the pattern into
                    // `[before.., _, after..]` with a wildcard rest.
                    let (before, after) = pats.split_at(i);
                    hir::PatKind::Slice(
                        before,
                        Some(self.arena.alloc(self.pat_without_dbm(span, hir::PatKind::Wild))),
                        after,
                    )
                } else {
                    hir::PatKind::Slice(pats, None, &[])
                };
                return self.pat_without_dbm(lhs.span, slice_pat);
            }
            // Tuple structs.
            ExprKind::Call(callee, args) => {
                if let Some((qself, path)) = self.extract_tuple_struct_path(callee) {
                    let (pats, rest) = self.destructure_sequence(
                        args,
                        "tuple struct or variant",
                        eq_sign_span,
                        assignments,
                    );
                    let qpath = self.lower_qpath(
                        callee.id,
                        qself,
                        path,
                        ParamMode::Optional,
                        ImplTraitContext::disallowed(),
                    );
                    // Destructure like a tuple struct.
                    let tuple_struct_pat =
                        hir::PatKind::TupleStruct(qpath, pats, rest.map(|r| r.0));
                    return self.pat_without_dbm(lhs.span, tuple_struct_pat);
                }
                // Not a tuple-struct path: fall through to the default lvalue case.
            }
            // Structs.
            ExprKind::Struct(se) => {
                let field_pats = self.arena.alloc_from_iter(se.fields.iter().map(|f| {
                    let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
                    hir::PatField {
                        hir_id: self.next_id(),
                        ident: self.lower_ident(f.ident),
                        pat,
                        is_shorthand: f.is_shorthand,
                        span: self.lower_span(f.span),
                    }
                }));
                let qpath = self.lower_qpath(
                    lhs.id,
                    &se.qself,
                    &se.path,
                    ParamMode::Optional,
                    ImplTraitContext::disallowed(),
                );
                let fields_omitted = match &se.rest {
                    StructRest::Base(e) => {
                        // `..base` (functional record update) has no meaning on the
                        // LHS of an assignment; reject it but continue lowering as
                        // if fields were simply omitted.
                        self.sess
                            .struct_span_err(
                                e.span,
                                "functional record updates are not allowed in destructuring \
                                assignments",
                            )
                            .span_suggestion(
                                e.span,
                                "consider removing the trailing pattern",
                                String::new(),
                                rustc_errors::Applicability::MachineApplicable,
                            )
                            .emit();
                        true
                    }
                    StructRest::Rest(_) => true,
                    StructRest::None => false,
                };
                let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
                return self.pat_without_dbm(lhs.span, struct_pat);
            }
            // Tuples.
            ExprKind::Tup(elements) => {
                let (pats, rest) =
                    self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
                let tuple_pat = hir::PatKind::Tuple(pats, rest.map(|r| r.0));
                return self.pat_without_dbm(lhs.span, tuple_pat);
            }
            ExprKind::Paren(e) => {
                // We special-case `(..)` for consistency with patterns.
                if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
                    let tuple_pat = hir::PatKind::Tuple(&[], Some(0));
                    return self.pat_without_dbm(lhs.span, tuple_pat);
                } else {
                    return self.destructure_assign_mut(e, eq_sign_span, assignments);
                }
            }
            _ => {}
        }
        // Treat all other cases as normal lvalue.
        // Bind the value to a fresh `lhs` ident and record `<lhs-expr> = lhs`
        // as a sub-assignment to be emitted after the destructuring `let`.
        let ident = Ident::new(sym::lhs, self.lower_span(lhs.span));
        let (pat, binding) = self.pat_ident_mut(lhs.span, ident);
        let ident = self.expr_ident(lhs.span, ident, binding);
        let assign =
            hir::ExprKind::Assign(self.lower_expr(lhs), ident, self.lower_span(eq_sign_span));
        let expr = self.expr(lhs.span, assign, ThinVec::new());
        assignments.push(self.stmt_expr(lhs.span, expr));
        pat
    }

    /// Destructure a sequence of expressions occurring on the LHS of an assignment.
    /// Such a sequence occurs in a tuple (struct)/slice.
    /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
    /// exists.
    /// Each sub-assignment is recorded in `assignments`.
    fn destructure_sequence(
        &mut self,
        elements: &[AstP<Expr>],
        ctx: &str,
        eq_sign_span: Span,
        assignments: &mut Vec<hir::Stmt<'hir>>,
    ) -> (&'hir [hir::Pat<'hir>], Option<(usize, Span)>) {
        let mut rest = None;
        let elements =
            self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
                // Check for `..` pattern.
                if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
                    if let Some((_, prev_span)) = rest {
                        // At most one `..` is allowed per sequence.
                        self.ban_extra_rest_pat(e.span, prev_span, ctx);
                    } else {
                        rest = Some((i, e.span));
                    }
                    // `..` produces no pattern of its own; it is reported via `rest`.
                    None
                } else {
                    Some(self.destructure_assign_mut(e, eq_sign_span, assignments))
                }
            }));
        (elements, rest)
    }

    /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
    fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
        let e1 = self.lower_expr_mut(e1);
        let e2 = self.lower_expr_mut(e2);
        // The constructor is referenced through its lang item so the desugaring
        // does not depend on the path `std::ops::RangeInclusive` being in scope.
        let fn_path = hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, self.lower_span(span));
        let fn_expr =
            self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
        hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
    }

    /// Desugar the remaining range forms (`..`, `a..`, `..b`, `a..b`, `..=b`)
    /// into struct literals of the corresponding range lang item.
    /// `a..=b` is handled separately by [`Self::lower_expr_range_closed`].
    fn lower_expr_range(
        &mut self,
        span: Span,
        e1: Option<&Expr>,
        e2: Option<&Expr>,
        lims: RangeLimits,
    ) -> hir::ExprKind<'hir> {
        use rustc_ast::RangeLimits::*;

        let lang_item = match (e1, e2, lims) {
            (None, None, HalfOpen) => hir::LangItem::RangeFull,
            (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
            (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
            (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
            (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
            // `a..=b` never reaches this function; see `lower_expr_range_closed`.
            (Some(..), Some(..), Closed) => unreachable!(),
            // `a..=` / `..=` with no end is rejected by the parser; if one slips
            // through, abort rather than produce a nonsensical range.
            (_, None, Closed) => self.diagnostic().span_fatal(span, "inclusive range with no end"),
        };

        // Build the `start`/`end` fields for whichever endpoints are present.
        let fields = self.arena.alloc_from_iter(
            e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
                let expr = self.lower_expr(&e);
                let ident = Ident::new(Symbol::intern(s), self.lower_span(e.span));
                self.expr_field(ident, expr, e.span)
            }),
        );

        hir::ExprKind::Struct(
            self.arena.alloc(hir::QPath::LangItem(lang_item, self.lower_span(span))),
            fields,
            None,
        )
    }

    /// Lower an optional loop label, re-spanning its identifier.
    fn lower_label(&self, opt_label: Option<Label>) -> Option<Label> {
        let label = opt_label?;
        Some(Label { ident: self.lower_ident(label.ident) })
    }

    /// Resolve a `break`/`continue` target: either the explicitly labeled loop
    /// or the innermost enclosing loop scope.
    fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
        let target_id = match destination {
            Some((id, _)) => {
                if let Some(loop_id) = self.resolver.get_label_res(id) {
                    Ok(self.lower_node_id(loop_id))
                } else {
                    Err(hir::LoopIdError::UnresolvedLabel)
                }
            }
            // No label: fall back to the innermost loop, if any.
            None => self
                .loop_scope
                .map(|id| Ok(self.lower_node_id(id)))
                .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
        };
        let label = self.lower_label(destination.map(|(_, label)| label));
        hir::Destination { label, target_id }
    }

    /// Like [`Self::lower_loop_destination`], but rejects unlabeled
    /// `break`/`continue` inside a `while` condition, where the target
    /// loop would be ambiguous/ill-formed.
    fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
        if self.is_in_loop_condition && opt_label.is_none() {
            hir::Destination {
                label: None,
                target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
            }
        } else {
            self.lower_loop_destination(opt_label.map(|label| (id, label)))
        }
    }

    /// Run `f` with `catch_id` as the enclosing `try`-block scope, restoring
    /// the previous scope afterwards.
    fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
        let old_scope = self.catch_scope.replace(catch_id);
        let result = f(self);
        self.catch_scope = old_scope;
        result
    }

    /// Run `f` with `loop_id` as the innermost loop scope, restoring the
    /// previous scope (and the loop-condition flag) afterwards.
    fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
        // We're no longer in the base loop's condition; we're in another loop.
        let was_in_loop_condition = self.is_in_loop_condition;
        self.is_in_loop_condition = false;

        let old_scope = self.loop_scope.replace(loop_id);
        let result = f(self);
        self.loop_scope = old_scope;

        self.is_in_loop_condition = was_in_loop_condition;

        result
    }

    /// Run `f` with the loop-condition flag set (used when lowering a `while`
    /// condition), restoring the previous value afterwards.
    fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
        let was_in_loop_condition = self.is_in_loop_condition;
        self.is_in_loop_condition = true;

        let result = f(self);

        self.is_in_loop_condition = was_in_loop_condition;

        result
    }

    /// Lower legacy `llvm_asm!` to HIR, splitting the constraint metadata
    /// (`inner`) from the lowered operand expressions.
    fn lower_expr_llvm_asm(&mut self, asm: &LlvmInlineAsm) -> hir::ExprKind<'hir> {
        let inner = hir::LlvmInlineAsmInner {
            inputs: asm.inputs.iter().map(|&(c, _)| c).collect(),
            outputs: asm
                .outputs
                .iter()
                .map(|out| hir::LlvmInlineAsmOutput {
                    constraint: out.constraint,
                    is_rw: out.is_rw,
                    is_indirect: out.is_indirect,
                    span: self.lower_span(out.expr.span),
                })
                .collect(),
            asm: asm.asm,
            asm_str_style: asm.asm_str_style,
            clobbers: asm.clobbers.clone(),
            volatile: asm.volatile,
            alignstack: asm.alignstack,
            dialect: asm.dialect,
        };
        let hir_asm = hir::LlvmInlineAsm {
            inner,
            inputs_exprs: self.arena.alloc_from_iter(
                asm.inputs.iter().map(|&(_, ref input)| self.lower_expr_mut(input)),
            ),
            outputs_exprs: self
                .arena
                .alloc_from_iter(asm.outputs.iter().map(|out| self.lower_expr_mut(&out.expr))),
        };
        hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
    }

    /// Lower a single `field: expr` initializer of a struct expression.
    fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
        hir::ExprField {
            hir_id: self.next_id(),
            ident: self.lower_ident(f.ident),
            expr: self.lower_expr(&f.expr),
            span: self.lower_span(f.span),
            is_shorthand: f.is_shorthand,
        }
    }

    /// Lower `yield <expr>`, marking the enclosing body as a generator.
    /// `yield` inside an `async` context is an error (E0727).
    fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
        match self.generator_kind {
            Some(hir::GeneratorKind::Gen) => {}
            Some(hir::GeneratorKind::Async(_)) => {
                struct_span_err!(
                    self.sess,
                    span,
                    E0727,
                    "`async` generators are not yet supported"
                )
                .emit();
            }
            // First `yield` seen: this body is a generator.
            None => self.generator_kind = Some(hir::GeneratorKind::Gen),
        }

        // `yield` with no operand yields `()`.
        let expr =
            opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));

        hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
    }

    /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
    /// ```rust
    /// {
    ///     let result = match IntoIterator::into_iter(<head>) {
    ///         mut iter => {
    ///             [opt_ident]: loop {
    ///                 match Iterator::next(&mut iter) {
    ///                     None => break,
    ///                     Some(<pat>) => <body>,
    ///                 };
    ///             }
    ///         }
    ///     };
    ///     result
    /// }
    /// ```
    fn lower_expr_for(
        &mut self,
        e: &Expr,
        pat: &Pat,
        head: &Expr,
        body: &Block,
        opt_label: Option<Label>,
    ) -> hir::Expr<'hir> {
        let head = self.lower_expr_mut(head);
        let pat = self.lower_pat(pat);
        // Mark all desugaring-introduced spans so diagnostics can tell
        // synthesized code apart from user-written code.
        let for_span =
            self.mark_span_with_reason(DesugaringKind::ForLoop, self.lower_span(e.span), None);
        let head_span = self.mark_span_with_reason(DesugaringKind::ForLoop, head.span, None);
        let pat_span = self.mark_span_with_reason(DesugaringKind::ForLoop, pat.span, None);

        // `None => break`
        let none_arm = {
            let break_expr =
                self.with_loop_scope(e.id, |this| this.expr_break_alloc(for_span, ThinVec::new()));
            let pat = self.pat_none(for_span);
            self.arm(pat, break_expr)
        };

        // Some(<pat>) => <body>,
        let some_arm = {
            let some_pat = self.pat_some(pat_span, pat);
            let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
            let body_expr = self.arena.alloc(self.expr_block(body_block, ThinVec::new()));
            self.arm(some_pat, body_expr)
        };

        // `mut iter`
        let iter = Ident::with_dummy_span(sym::iter);
        let (iter_pat, iter_pat_nid) =
            self.pat_ident_binding_mode(head_span, iter, hir::BindingAnnotation::Mutable);

        // `match Iterator::next(&mut iter) { ... }`
        let match_expr = {
            let iter = self.expr_ident(head_span, iter, iter_pat_nid);
            let ref_mut_iter = self.expr_mut_addr_of(head_span, iter);
            let next_expr = self.expr_call_lang_item_fn(
                head_span,
                hir::LangItem::IteratorNext,
                arena_vec![self; ref_mut_iter],
            );
            let arms = arena_vec![self; none_arm, some_arm];

            self.expr_match(head_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
        };
        let match_stmt = self.stmt_expr(for_span, match_expr);

        let loop_block = self.block_all(for_span, arena_vec![self; match_stmt], None);

        // `[opt_ident]: loop { ... }`
        let kind = hir::ExprKind::Loop(
            loop_block,
            self.lower_label(opt_label),
            hir::LoopSource::ForLoop,
            self.lower_span(for_span.with_hi(head.span.hi())),
        );
        // The loop takes over the `for` expression's own node id so label
        // resolution keeps pointing at the right node.
        let loop_expr =
            self.arena.alloc(hir::Expr { hir_id: self.lower_node_id(e.id), kind, span: for_span });

        // `mut iter => { ... }`
        let iter_arm = self.arm(iter_pat, loop_expr);

        // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
        let into_iter_expr = {
            self.expr_call_lang_item_fn(
                head_span,
                hir::LangItem::IntoIterIntoIter,
                arena_vec![self; head],
            )
        };

        let match_expr = self.arena.alloc(self.expr_match(
            for_span,
            into_iter_expr,
            arena_vec![self; iter_arm],
            hir::MatchSource::ForLoopDesugar,
        ));

        let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();

        // This is effectively `{ let _result = ...; _result }`.
        // The construct was introduced in #21984 and is necessary to make sure that
        // temporaries in the `head` expression are dropped and do not leak to the
        // surrounding scope of the `match` since the `match` is not a terminating scope.
        //
        // Also, add the attributes to the outer returned expr node.
        self.expr_drop_temps_mut(for_span, match_expr, attrs.into())
    }

    /// Desugar `ExprKind::Try` from: `<expr>?` into:
    /// ```rust
    /// match Try::branch(<expr>) {
    ///     ControlFlow::Continue(val) => #[allow(unreachable_code)] val,
    ///     ControlFlow::Break(residual) =>
    ///         #[allow(unreachable_code)]
    ///         // If there is an enclosing `try {...}`:
    ///         break 'catch_target Try::from_residual(residual),
    ///         // Otherwise:
    ///         return Try::from_residual(residual),
    /// }
    /// ```
    fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
        let unstable_span = self.mark_span_with_reason(
            DesugaringKind::QuestionMark,
            span,
            self.allow_try_trait.clone(),
        );
        // The `?` token itself sits at the end of the whole expression's span.
        let try_span = self.sess.source_map().end_point(span);
        let try_span = self.mark_span_with_reason(
            DesugaringKind::QuestionMark,
            try_span,
            self.allow_try_trait.clone(),
        );

        // `Try::branch(<expr>)`
        let scrutinee = {
            // expand <expr>
            let sub_expr = self.lower_expr_mut(sub_expr);

            self.expr_call_lang_item_fn(
                unstable_span,
                hir::LangItem::TryTraitBranch,
                arena_vec![self; sub_expr],
            )
        };

        // `#[allow(unreachable_code)]`
        // Suppresses unreachable-code lints on code following a `?` whose
        // success arm can never be reached (e.g. `Err(..)?`).
        let attr = {
            // `allow(unreachable_code)`
            let allow = {
                let allow_ident = Ident::new(sym::allow, self.lower_span(span));
                let uc_ident = Ident::new(sym::unreachable_code, self.lower_span(span));
                let uc_nested = attr::mk_nested_word_item(uc_ident);
                attr::mk_list_item(allow_ident, vec![uc_nested])
            };
            attr::mk_attr_outer(allow)
        };
        let attrs = vec![attr];

        // `ControlFlow::Continue(val) => #[allow(unreachable_code)] val,`
        let continue_arm = {
            let val_ident = Ident::with_dummy_span(sym::val);
            let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
            let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
                span,
                val_ident,
                val_pat_nid,
                ThinVec::from(attrs.clone()),
            ));
            let continue_pat = self.pat_cf_continue(unstable_span, val_pat);
            self.arm(continue_pat, val_expr)
        };

        // `ControlFlow::Break(residual) =>
        //     #[allow(unreachable_code)]
        //     return Try::from_residual(residual),`
        let break_arm = {
            let residual_ident = Ident::with_dummy_span(sym::residual);
            let (residual_local, residual_local_nid) = self.pat_ident(try_span, residual_ident);
            let residual_expr = self.expr_ident_mut(try_span, residual_ident, residual_local_nid);
            let from_residual_expr = self.wrap_in_try_constructor(
                hir::LangItem::TryTraitFromResidual,
                try_span,
                self.arena.alloc(residual_expr),
                unstable_span,
            );
            let thin_attrs = ThinVec::from(attrs);
            // Inside a `try { ... }` block the residual breaks out of the
            // block; otherwise it returns from the enclosing function.
            let ret_expr = if let Some(catch_node) = self.catch_scope {
                let target_id = Ok(self.lower_node_id(catch_node));
                self.arena.alloc(self.expr(
                    try_span,
                    hir::ExprKind::Break(
                        hir::Destination { label: None, target_id },
                        Some(from_residual_expr),
                    ),
                    thin_attrs,
                ))
            } else {
                self.arena.alloc(self.expr(
                    try_span,
                    hir::ExprKind::Ret(Some(from_residual_expr)),
                    thin_attrs,
                ))
            };

            let break_pat = self.pat_cf_break(try_span, residual_local);
            self.arm(break_pat, ret_expr)
        };

        hir::ExprKind::Match(
            scrutinee,
            arena_vec![self; break_arm, continue_arm],
            hir::MatchSource::TryDesugar,
        )
    }

    // =========================================================================
    // Helper methods for building HIR.
    // =========================================================================

    /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
    ///
    /// In terms of drop order, it has the same effect as wrapping `expr` in
    /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
    ///
    /// The drop order can be important in e.g. `if expr { .. }`.
    pub(super) fn expr_drop_temps(
        &mut self,
        span: Span,
        expr: &'hir hir::Expr<'hir>,
        attrs: AttrVec,
    ) -> &'hir hir::Expr<'hir> {
        self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
    }

    /// By-value variant of [`Self::expr_drop_temps`].
    pub(super) fn expr_drop_temps_mut(
        &mut self,
        span: Span,
        expr: &'hir hir::Expr<'hir>,
        attrs: AttrVec,
    ) -> hir::Expr<'hir> {
        self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
    }

    /// Build a `match` expression over the given scrutinee and arms.
    fn expr_match(
        &mut self,
        span: Span,
        arg: &'hir hir::Expr<'hir>,
        arms: &'hir [hir::Arm<'hir>],
        source: hir::MatchSource,
    ) -> hir::Expr<'hir> {
        self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
    }

    /// Build an unlabeled `break` targeting the innermost loop scope.
    fn expr_break(&mut self, span: Span, attrs: AttrVec) -> hir::Expr<'hir> {
        let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
        self.expr(span, expr_break, attrs)
    }

    /// Arena-allocating variant of [`Self::expr_break`].
    fn expr_break_alloc(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
        let expr_break = self.expr_break(span, attrs);
        self.arena.alloc(expr_break)
    }

    /// Build `&mut <e>`.
    fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
        self.expr(
            span,
            hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
            ThinVec::new(),
        )
    }

    /// Build the unit expression `()`.
    fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
        self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
    }

    /// Build a call expression `e(args...)`.
    fn expr_call_mut(
        &mut self,
        span: Span,
        e: &'hir hir::Expr<'hir>,
        args: &'hir [hir::Expr<'hir>],
    ) -> hir::Expr<'hir> {
        self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
    }

    /// Arena-allocating variant of [`Self::expr_call_mut`].
    fn expr_call(
        &mut self,
        span: Span,
        e: &'hir hir::Expr<'hir>,
        args: &'hir [hir::Expr<'hir>],
    ) -> &'hir hir::Expr<'hir> {
        self.arena.alloc(self.expr_call_mut(span, e, args))
    }

    /// Build a call to a lang-item function, e.g. `Iterator::next(args...)`.
    fn expr_call_lang_item_fn_mut(
        &mut self,
        span: Span,
        lang_item: hir::LangItem,
        args: &'hir [hir::Expr<'hir>],
    ) -> hir::Expr<'hir> {
        let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, ThinVec::new()));
        self.expr_call_mut(span, path, args)
    }

    /// Arena-allocating variant of [`Self::expr_call_lang_item_fn_mut`].
    fn expr_call_lang_item_fn(
        &mut self,
        span: Span,
        lang_item: hir::LangItem,
        args: &'hir [hir::Expr<'hir>],
    ) -> &'hir hir::Expr<'hir> {
        self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args))
    }

    /// Build a path expression referring to a lang item.
    fn expr_lang_item_path(
        &mut self,
        span: Span,
        lang_item: hir::LangItem,
        attrs: AttrVec,
    ) -> hir::Expr<'hir> {
        self.expr(
            span,
            hir::ExprKind::Path(hir::QPath::LangItem(lang_item, self.lower_span(span))),
            attrs,
        )
    }

    /// Build a reference to the local binding `binding` named `ident`.
    pub(super) fn expr_ident(
        &mut self,
        sp: Span,
        ident: Ident,
        binding: hir::HirId,
    ) -> &'hir hir::Expr<'hir> {
        self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
    }

    /// By-value variant of [`Self::expr_ident`].
    pub(super) fn expr_ident_mut(
        &mut self,
        sp: Span,
        ident: Ident,
        binding: hir::HirId,
    ) -> hir::Expr<'hir> {
        self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
    }

    /// Like [`Self::expr_ident_mut`], but with attributes attached
    /// (e.g. `#[allow(unreachable_code)]` in the `?` desugaring).
    fn expr_ident_with_attrs(
        &mut self,
        span: Span,
        ident: Ident,
        binding: hir::HirId,
        attrs: AttrVec,
    ) -> hir::Expr<'hir> {
        let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
            None,
            self.arena.alloc(hir::Path {
                span: self.lower_span(span),
                res: Res::Local(binding),
                segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
            }),
        ));

        self.expr(span, expr_path, attrs)
    }

    /// Wrap `expr` in a compiler-generated `unsafe { ... }` block.
    fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
        let hir_id = self.next_id();
        let span = expr.span;
        self.expr(
            span,
            hir::ExprKind::Block(
                self.arena.alloc(hir::Block {
                    stmts: &[],
                    expr: Some(expr),
                    hir_id,
                    // `CompilerGenerated` exempts this block from the
                    // `unused_unsafe` lint.
                    rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
                    span: self.lower_span(span),
                    targeted_by_break: false,
                }),
                None,
            ),
            ThinVec::new(),
        )
    }

    /// Build an empty block expression `{}`.
    fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
        let blk = self.block_all(span, &[], None);
        let expr = self.expr_block(blk, ThinVec::new());
        self.arena.alloc(expr)
    }

    /// Wrap a block in an (unlabeled) block expression.
    pub(super) fn expr_block(
        &mut self,
        b: &'hir hir::Block<'hir>,
        attrs: AttrVec,
    ) -> hir::Expr<'hir> {
        self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
    }

    /// Build a `hir::Expr` with a fresh `HirId`, registering its attributes.
    pub(super) fn expr(
        &mut self,
        span: Span,
        kind: hir::ExprKind<'hir>,
        attrs: AttrVec,
    ) -> hir::Expr<'hir> {
        let hir_id = self.next_id();
        self.lower_attrs(hir_id, &attrs);
        hir::Expr { hir_id, kind, span: self.lower_span(span) }
    }

    /// Build a (non-shorthand) struct-expression field `ident: expr`.
    fn expr_field(
        &mut self,
        ident: Ident,
        expr: &'hir hir::Expr<'hir>,
        span: Span,
    ) -> hir::ExprField<'hir> {
        hir::ExprField {
            hir_id: self.next_id(),
            ident,
            span: self.lower_span(span),
            expr,
            is_shorthand: false,
        }
    }

    /// Build a guardless match arm `pat => expr`.
    fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
        hir::Arm {
            hir_id: self.next_id(),
            pat,
            guard: None,
            span: self.lower_span(expr.span),
            body: expr,
        }
    }
}