// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ir

// This file implements the BUILD phase of IR construction.
//
// IR construction has two phases, CREATE and BUILD. In the CREATE phase
// (create.go), all packages are constructed and type-checked and
// definitions of all package members are created, method-sets are
// computed, and wrapper methods are synthesized.
// ir.Packages are created in arbitrary order.
//
// In the BUILD phase (builder.go), the builder traverses the AST of
// each Go source function and generates IR instructions for the
// function body. Initializer expressions for package-level variables
// are emitted to the package's init() function in the order specified
// by go/types.Info.InitOrder, then code for each function in the
// package is generated in lexical order.
//
// The builder's and Program's indices (maps) are populated and
// mutated during the CREATE phase, but during the BUILD phase they
// remain constant. The sole exception is Prog.methodSets and its
// related maps, which are protected by a dedicated mutex.

import (
	"fmt"
	"go/ast"
	"go/constant"
	"go/token"
	"go/types"
	"os"
)

// opaqueType wraps a types.Type with a synthetic display name;
// String returns that name instead of the embedded type's notation.
type opaqueType struct {
	types.Type
	name string
}

func (t *opaqueType) String() string { return t.name }

var (
	varOk    = newVar("ok", tBool)
	varIndex = newVar("index", tInt)

	// Type constants.
	tBool       = types.Typ[types.Bool]
	tByte       = types.Typ[types.Byte]
	tInt        = types.Typ[types.Int]
	tInvalid    = types.Typ[types.Invalid]
	tString     = types.Typ[types.String]
	tUntypedNil = types.Typ[types.UntypedNil]
	tRangeIter  = &opaqueType{nil, "iter"} // the type of all "range" iterators
	tEface      = types.NewInterfaceType(nil, nil).Complete()
)

// builder holds state associated with the package currently being built.
// Its methods contain all the logic for AST-to-IR conversion.
type builder struct {
	// printFunc names a function for which HTML debug output should be
	// produced (see initHTML); empty means none.
	printFunc string

	// blocksets is scratch storage reused across calls to avoid
	// repeated allocations.
	blocksets [5]BlockSet
}

// cond emits to fn code to evaluate boolean condition e and jump
// to t or f depending on its value, performing various simplifications.
//
// Postcondition: fn.currentBlock is nil.
//
func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If {
	switch e := e.(type) {
	case *ast.ParenExpr:
		return b.cond(fn, e.X, t, f)

	case *ast.BinaryExpr:
		switch e.Op {
		case token.LAND:
			// Short-circuit AND: evaluate e.Y only when e.X is true.
			ltrue := fn.newBasicBlock("cond.true")
			b.cond(fn, e.X, ltrue, f)
			fn.currentBlock = ltrue
			return b.cond(fn, e.Y, t, f)

		case token.LOR:
			// Short-circuit OR: evaluate e.Y only when e.X is false.
			lfalse := fn.newBasicBlock("cond.false")
			b.cond(fn, e.X, t, lfalse)
			fn.currentBlock = lfalse
			return b.cond(fn, e.Y, t, f)
		}

	case *ast.UnaryExpr:
		if e.Op == token.NOT {
			// !e: swap the true and false targets.
			return b.cond(fn, e.X, f, t)
		}
	}

	// A traditional compiler would simplify "if false" (etc) here
	// but we do not, for better fidelity to the source code.
	//
	// The value of a constant condition may be platform-specific,
	// and may cause blocks that are reachable in some configuration
	// to be hidden from subsequent analyses such as bug-finding tools.
	return emitIf(fn, b.expr(fn, e), t, f, e)
}

// logicalBinop emits code to fn to evaluate e, a &&- or
// ||-expression whose reified boolean value is wanted.
// The value is returned.
//
func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
	rhs := fn.newBasicBlock("binop.rhs")
	done := fn.newBasicBlock("binop.done")

	// T(e) = T(e.X) = T(e.Y) after untyped constants have been
	// eliminated.
	// TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
	t := fn.Pkg.typeOf(e)

	var short Value // value of the short-circuit path
	switch e.Op {
	case token.LAND:
		b.cond(fn, e.X, rhs, done)
		short = emitConst(fn, NewConst(constant.MakeBool(false), t))

	case token.LOR:
		b.cond(fn, e.X, done, rhs)
		short = emitConst(fn, NewConst(constant.MakeBool(true), t))
	}

	// Is rhs unreachable?
	if rhs.Preds == nil {
		// Simplify false&&y to false, true||y to true.
		fn.currentBlock = done
		return short
	}

	// Is done unreachable?
	if done.Preds == nil {
		// Simplify true&&y (or false||y) to y.
		fn.currentBlock = rhs
		return b.expr(fn, e.Y)
	}

	// All edges from e.X to done carry the short-circuit value.
	var edges []Value
	for range done.Preds {
		edges = append(edges, short)
	}

	// The edge from e.Y to done carries the value of e.Y.
	fn.currentBlock = rhs
	edges = append(edges, b.expr(fn, e.Y))
	emitJump(fn, done, e)
	fn.currentBlock = done

	// Merge the short-circuit constant(s) and the e.Y value with a phi.
	phi := &Phi{Edges: edges}
	phi.typ = t
	return done.emit(phi, e)
}

// exprN lowers a multi-result expression e to IR form, emitting code
// to fn and returning a single Value whose type is a *types.Tuple.
// The caller must access the components via Extract.
//
// Multi-result expressions include CallExprs in a multi-value
// assignment or return statement, and "value,ok" uses of
// TypeAssertExpr, IndexExpr (when X is a map), and Recv.
//
func (b *builder) exprN(fn *Function, e ast.Expr) Value {
	typ := fn.Pkg.typeOf(e).(*types.Tuple)
	switch e := e.(type) {
	case *ast.ParenExpr:
		return b.exprN(fn, e.X)

	case *ast.CallExpr:
		// Currently, no built-in function nor type conversion
		// has multiple results, so we can avoid some of the
		// cases for single-valued CallExpr.
		var c Call
		b.setCall(fn, e, &c.Call)
		c.typ = typ
		return fn.emit(&c, e)

	case *ast.IndexExpr:
		// Comma-ok map lookup: v, ok := m[k].
		mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
		lookup := &MapLookup{
			X:       b.expr(fn, e.X),
			Index:   emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e),
			CommaOk: true,
		}
		lookup.setType(typ)
		return fn.emit(lookup, e)

	case *ast.TypeAssertExpr:
		// Comma-ok type assertion: v, ok := x.(T).
		return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e)

	case *ast.UnaryExpr: // must be receive <-
		// Comma-ok channel receive: v, ok := <-ch.
		return emitRecv(fn, b.expr(fn, e.X), true, typ, e)
	}
	panic(fmt.Sprintf("exprN(%T) in %s", e, fn))
}

// builtin emits to fn IR instructions to implement a call to the
// built-in function obj with the specified arguments
// and return type. It returns the value defined by the result.
//
// The result is nil if no special handling was required; in this case
// the caller should treat this like an ordinary library function
// call.
//
func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, source ast.Node) Value {
	switch obj.Name() {
	case "make":
		switch typ.Underlying().(type) {
		case *types.Slice:
			n := b.expr(fn, args[1])
			m := n
			if len(args) == 3 {
				m = b.expr(fn, args[2])
			}
			if m, ok := m.(*Const); ok {
				// treat make([]T, n, m) as new([m]T)[:n]
				cap := m.Int64()
				at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap)
				alloc := emitNew(fn, at, source)
				v := &Slice{
					X:    alloc,
					High: n,
				}
				v.setType(typ)
				return fn.emit(v, source)
			}
			v := &MakeSlice{
				Len: n,
				Cap: m,
			}
			v.setType(typ)
			return fn.emit(v, source)

		case *types.Map:
			var res Value
			if len(args) == 2 {
				res = b.expr(fn, args[1])
			}
			v := &MakeMap{Reserve: res}
			v.setType(typ)
			return fn.emit(v, source)

		case *types.Chan:
			var sz Value = emitConst(fn, intConst(0))
			if len(args) == 2 {
				sz = b.expr(fn, args[1])
			}
			v := &MakeChan{Size: sz}
			v.setType(typ)
			return fn.emit(v, source)
		}

	case "new":
		alloc := emitNew(fn, deref(typ), source)
		return alloc

	case "len", "cap":
		// Special case: len or cap of an array or *array is
		// based on the type, not the value which may be nil.
		// We must still evaluate the value, though. (If it
		// was side-effect free, the whole call would have
		// been constant-folded.)
		t := deref(fn.Pkg.typeOf(args[0])).Underlying()
		if at, ok := t.(*types.Array); ok {
			b.expr(fn, args[0]) // for effects only
			return emitConst(fn, intConst(at.Len()))
		}
		// Otherwise treat as normal.

	case "panic":
		fn.emit(&Panic{
			X: emitConv(fn, b.expr(fn, args[0]), tEface, source),
		}, source)
		// panic never returns: wire this block to the exit and continue
		// building into an unreachable block.
		addEdge(fn.currentBlock, fn.Exit)
		fn.currentBlock = fn.newBasicBlock("unreachable")
		return emitConst(fn, NewConst(constant.MakeBool(true), tBool)) // any non-nil Value will do
	}
	return nil // treat all others as a regular function call
}

// addr lowers a single-result addressable expression e to IR form,
// emitting code to fn and returning the location (an lvalue) defined
// by the expression.
//
// If escaping is true, addr marks the base variable of the
// addressable expression e as being a potentially escaping pointer
// value. For example, in this code:
//
//	a := A{
//		b: [1]B{B{c: 1}}
//	}
//	return &a.b[0].c
//
// the application of & causes a.b[0].c to have its address taken,
// which means that ultimately the local variable a must be
// heap-allocated. This is a simple but very conservative escape
// analysis.
//
// Operations forming potentially escaping pointers include:
// - &x, including when implicit in method call or composite literals.
// - a[:] iff a is an array (not *array)
// - references to variables in lexically enclosing functions.
//
func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
	switch e := e.(type) {
	case *ast.Ident:
		if isBlankIdent(e) {
			return blank{}
		}
		obj := fn.Pkg.objectOf(e)
		v := fn.Prog.packageLevelValue(obj) // var (address)
		if v == nil {
			v = fn.lookup(obj, escaping)
		}
		return &address{addr: v, expr: e}

	case *ast.CompositeLit:
		// Allocate storage (heap if escaping, else a local) and
		// initialize it in place.
		t := deref(fn.Pkg.typeOf(e))
		var v *Alloc
		if escaping {
			v = emitNew(fn, t, e)
		} else {
			v = fn.addLocal(t, e)
		}
		var sb storebuf
		b.compLit(fn, v, e, true, &sb)
		sb.emit(fn)
		return &address{addr: v, expr: e}

	case *ast.ParenExpr:
		return b.addr(fn, e.X, escaping)

	case *ast.SelectorExpr:
		sel, ok := fn.Pkg.info.Selections[e]
		if !ok {
			// qualified identifier
			return b.addr(fn, e.Sel, escaping)
		}
		if sel.Kind() != types.FieldVal {
			panic(sel)
		}
		wantAddr := true
		v := b.receiver(fn, e.X, wantAddr, escaping, sel, e)
		last := len(sel.Index()) - 1
		return &address{
			addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel),
			expr: e.Sel,
		}

	case *ast.IndexExpr:
		var x Value
		var et types.Type
		switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
		case *types.Array:
			x = b.addr(fn, e.X, escaping).address(fn)
			et = types.NewPointer(t.Elem())
		case *types.Pointer: // *array
			x = b.expr(fn, e.X)
			et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())
		case *types.Slice:
			x = b.expr(fn, e.X)
			et = types.NewPointer(t.Elem())
		case *types.Map:
			// Map elements have no address; return a map-element lvalue.
			return &element{
				m: b.expr(fn, e.X),
				k: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index),
				t: t.Elem(),
			}
		default:
			panic("unexpected container type in IndexExpr: " + t.String())
		}
		v := &IndexAddr{
			X:     x,
			Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index),
		}
		v.setType(et)
		return &address{addr: fn.emit(v, e), expr: e}

	case *ast.StarExpr:
		return &address{addr: b.expr(fn, e.X), expr: e}
	}

	panic(fmt.Sprintf("unexpected address expression: %T", e))
}

// store records a single deferred assignment of rhs to lhs, to be
// executed later by storebuf.emit.
type store struct {
	lhs    lvalue
	rhs    Value
	source ast.Node
}

// storebuf buffers assignments so that all reads of a parallel
// assignment can be emitted before any of its writes.
type storebuf struct{ stores []store }

// store appends a deferred assignment to the buffer.
func (sb *storebuf) store(lhs lvalue, rhs Value, source ast.Node) {
	sb.stores = append(sb.stores, store{lhs, rhs, source})
}

// emit performs all buffered stores, in the order they were recorded.
func (sb *storebuf) emit(fn *Function) {
	for _, s := range sb.stores {
		s.lhs.store(fn, s.rhs, s.source)
	}
}

// assign emits to fn code to initialize the lvalue loc with the value
// of expression e. If isZero is true, assign assumes that loc holds
// the zero value for its type.
//
// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate
// better code in some cases, e.g., for composite literals in an
// addressable location.
//
// If sb is not nil, assign generates code to evaluate expression e, but
// not to update loc. Instead, the necessary stores are appended to the
// storebuf sb so that they can be executed later. This allows correct
// in-place update of existing variables when the RHS is a composite
// literal that may reference parts of the LHS.
//
func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf, source ast.Node) {
	// Can we initialize it in place?
	if e, ok := unparen(e).(*ast.CompositeLit); ok {
		// A CompositeLit never evaluates to a pointer,
		// so if the type of the location is a pointer,
		// an &-operation is implied.
		if _, ok := loc.(blank); !ok { // avoid calling blank.typ()
			if isPointer(loc.typ()) {
				ptr := b.addr(fn, e, true).address(fn)
				// copy address
				if sb != nil {
					sb.store(loc, ptr, source)
				} else {
					loc.store(fn, ptr, source)
				}
				return
			}
		}

		if _, ok := loc.(*address); ok {
			if isInterface(loc.typ()) {
				// e.g.
				// var x interface{} = T{...}
				// Can't in-place initialize an interface value.
				// Fall back to copying.
			} else {
				// x = T{...} or x := T{...}
				addr := loc.address(fn)
				if sb != nil {
					b.compLit(fn, addr, e, isZero, sb)
				} else {
					var sb storebuf
					b.compLit(fn, addr, e, isZero, &sb)
					sb.emit(fn)
				}

				// Subtle: emit debug ref for aggregate types only;
				// slice and map are handled by store ops in compLit.
				switch loc.typ().Underlying().(type) {
				case *types.Struct, *types.Array:
					emitDebugRef(fn, e, addr, true)
				}

				return
			}
		}
	}

	// simple case: just copy
	rhs := b.expr(fn, e)
	if sb != nil {
		sb.store(loc, rhs, source)
	} else {
		loc.store(fn, rhs, source)
	}
}

// expr lowers a single-result expression e to IR form, emitting code
// to fn and returning the Value defined by the expression.
//
func (b *builder) expr(fn *Function, e ast.Expr) Value {
	e = unparen(e)

	tv := fn.Pkg.info.Types[e]

	// Is expression a constant?
	if tv.Value != nil {
		return emitConst(fn, NewConst(tv.Value, tv.Type))
	}

	var v Value
	if tv.Addressable() {
		// Prefer pointer arithmetic ({Index,Field}Addr) followed
		// by Load over subelement extraction (e.g. Index, Field),
		// to avoid large copies.
		v = b.addr(fn, e, false).load(fn, e)
	} else {
		v = b.expr0(fn, e, tv)
	}
	if fn.debugInfo() {
		emitDebugRef(fn, e, v, false)
	}
	return v
}

// expr0 lowers e, a non-constant, non-addressable single-result
// expression, to IR form; tv is e's type-checker record.
func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
	switch e := e.(type) {
	case *ast.BasicLit:
		panic("non-constant BasicLit") // unreachable

	case *ast.FuncLit:
		// Build the anonymous function, then close over its free
		// variables (if any) with a MakeClosure.
		fn2 := &Function{
			name:         fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
			Signature:    fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature),
			parent:       fn,
			Pkg:          fn.Pkg,
			Prog:         fn.Prog,
			functionBody: new(functionBody),
		}
		fn2.source = e
		fn.AnonFuncs = append(fn.AnonFuncs, fn2)
		fn2.initHTML(b.printFunc)
		b.buildFunction(fn2)
		if fn2.FreeVars == nil {
			return fn2
		}
		v := &MakeClosure{Fn: fn2}
		v.setType(tv.Type)
		for _, fv := range fn2.FreeVars {
			v.Bindings = append(v.Bindings, fv.outer)
			fv.outer = nil
		}
		return fn.emit(v, e)

	case *ast.TypeAssertExpr: // single-result form only
		return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e)

	case *ast.CallExpr:
		if fn.Pkg.info.Types[e.Fun].IsType() {
			// Explicit type conversion, e.g. string(x) or big.Int(x)
			x := b.expr(fn, e.Args[0])
			y := emitConv(fn, x, tv.Type, e)
			return y
		}
		// Call to "intrinsic" built-ins, e.g. new, make, panic.
		if id, ok := unparen(e.Fun).(*ast.Ident); ok {
			if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok {
				if v := b.builtin(fn, obj, e.Args, tv.Type, e); v != nil {
					return v
				}
			}
		}
		// Regular function call.
		var v Call
		b.setCall(fn, e, &v.Call)
		v.setType(tv.Type)
		return fn.emit(&v, e)

	case *ast.UnaryExpr:
		switch e.Op {
		case token.AND: // &X --- potentially escaping.
			addr := b.addr(fn, e.X, true)
			if _, ok := unparen(e.X).(*ast.StarExpr); ok {
				// &*p must panic if p is nil (http://golang.org/s/go12nil).
				// For simplicity, we'll just (suboptimally) rely
				// on the side effects of a load.
				// TODO(adonovan): emit dedicated nilcheck.
				addr.load(fn, e)
			}
			return addr.address(fn)
		case token.ADD:
			// Unary + is a no-op.
			return b.expr(fn, e.X)
		case token.NOT, token.SUB, token.XOR: // ! <- - ^
			v := &UnOp{
				Op: e.Op,
				X:  b.expr(fn, e.X),
			}
			v.setType(tv.Type)
			return fn.emit(v, e)
		case token.ARROW:
			return emitRecv(fn, b.expr(fn, e.X), false, tv.Type, e)
		default:
			panic(e.Op)
		}

	case *ast.BinaryExpr:
		switch e.Op {
		case token.LAND, token.LOR:
			return b.logicalBinop(fn, e)
		case token.SHL, token.SHR:
			fallthrough
		case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
			return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e)

		case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
			cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e)
			// The type of x==y may be UntypedBool.
			return emitConv(fn, cmp, types.Default(tv.Type), e)
		default:
			panic("illegal op in BinaryExpr: " + e.Op.String())
		}

	case *ast.SliceExpr:
		var low, high, max Value
		var x Value
		switch fn.Pkg.typeOf(e.X).Underlying().(type) {
		case *types.Array:
			// Potentially escaping.
			x = b.addr(fn, e.X, true).address(fn)
		case *types.Basic, *types.Slice, *types.Pointer: // *array
			x = b.expr(fn, e.X)
		default:
			panic("unreachable")
		}
		// NOTE(review): High is evaluated before Low here, which does not
		// match the source's left-to-right operand order — confirm this
		// ordering is intentional.
		if e.High != nil {
			high = b.expr(fn, e.High)
		}
		if e.Low != nil {
			low = b.expr(fn, e.Low)
		}
		if e.Slice3 {
			max = b.expr(fn, e.Max)
		}
		v := &Slice{
			X:    x,
			Low:  low,
			High: high,
			Max:  max,
		}
		v.setType(tv.Type)
		return fn.emit(v, e)

	case *ast.Ident:
		obj := fn.Pkg.info.Uses[e]
		// Universal built-in or nil?
		switch obj := obj.(type) {
		case *types.Builtin:
			return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
		case *types.Nil:
			return emitConst(fn, nilConst(tv.Type))
		}
		// Package-level func or var?
		if v := fn.Prog.packageLevelValue(obj); v != nil {
			if _, ok := obj.(*types.Var); ok {
				return emitLoad(fn, v, e) // var (address)
			}
			return v // (func)
		}
		// Local var.
		return emitLoad(fn, fn.lookup(obj, false), e) // var (address)

	case *ast.SelectorExpr:
		sel, ok := fn.Pkg.info.Selections[e]
		if !ok {
			// qualified identifier
			return b.expr(fn, e.Sel)
		}
		switch sel.Kind() {
		case types.MethodExpr:
			// (*T).f or T.f, the method f from the method-set of type T.
			// The result is a "thunk".
			return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type, e)

		case types.MethodVal:
			// e.f where e is an expression and f is a method.
			// The result is a "bound".
			obj := sel.Obj().(*types.Func)
			rt := recvType(obj)
			wantAddr := isPointer(rt)
			escaping := true
			v := b.receiver(fn, e.X, wantAddr, escaping, sel, e)
			if isInterface(rt) {
				// If v has interface type I,
				// we must emit a check that v is non-nil.
				// We use: typeassert v.(I).
				emitTypeAssert(fn, v, rt, e)
			}
			c := &MakeClosure{
				Fn:       makeBound(fn.Prog, obj),
				Bindings: []Value{v},
			}
			c.source = e.Sel
			c.setType(tv.Type)
			return fn.emit(c, e)

		case types.FieldVal:
			// e.f where f is a (possibly promoted) field.
			indices := sel.Index()
			last := len(indices) - 1
			v := b.expr(fn, e.X)
			v = emitImplicitSelections(fn, v, indices[:last], e)
			v = emitFieldSelection(fn, v, indices[last], false, e.Sel)
			return v
		}

		panic("unexpected expression-relative selector")

	case *ast.IndexExpr:
		switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
		case *types.Array:
			// Non-addressable array (in a register).
			v := &Index{
				X:     b.expr(fn, e.X),
				Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index),
			}
			v.setType(t.Elem())
			return fn.emit(v, e)

		case *types.Map:
			// Maps are not addressable.
			mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
			v := &MapLookup{
				X:     b.expr(fn, e.X),
				Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e.Index),
			}
			v.setType(mapt.Elem())
			return fn.emit(v, e)

		case *types.Basic: // => string
			// Strings are not addressable.
			v := &StringLookup{
				X:     b.expr(fn, e.X),
				Index: b.expr(fn, e.Index),
			}
			v.setType(tByte)
			return fn.emit(v, e)

		case *types.Slice, *types.Pointer: // *array
			// Addressable slice/array; use IndexAddr and Load.
			return b.addr(fn, e, false).load(fn, e)

		default:
			panic("unexpected container type in IndexExpr: " + t.String())
		}

	case *ast.CompositeLit, *ast.StarExpr:
		// Addressable types (lvalues)
		return b.addr(fn, e, false).load(fn, e)
	}

	panic(fmt.Sprintf("unexpected expr: %T", e))
}

// stmtList emits to fn code for all statements in list.
func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
	for _, s := range list {
		b.stmt(fn, s)
	}
}

// receiver emits to fn code for expression e in the "receiver"
// position of selection e.f (where f may be a field or a method) and
// returns the effective receiver after applying the implicit field
// selections of sel.
//
// wantAddr requests that the result is an address. If
// !sel.Indirect(), this may require that e be built in addr() mode; it
// must thus be addressable.
//
// escaping is defined as per builder.addr().
//
func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection, source ast.Node) Value {
	var v Value
	if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) {
		v = b.addr(fn, e, escaping).address(fn)
	} else {
		v = b.expr(fn, e)
	}

	// Apply the implicit field selections for any embedded fields on
	// the path to the selected member.
	last := len(sel.Index()) - 1
	v = emitImplicitSelections(fn, v, sel.Index()[:last], source)
	if !wantAddr && isPointer(v.Type()) {
		v = emitLoad(fn, v, e)
	}
	return v
}

// setCallFunc populates the function parts of a CallCommon structure
// (Func, Method, Recv, Args[0]) based on the kind of invocation
// occurring in e.
//
func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
	// Is this a method call?
	if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
		sel, ok := fn.Pkg.info.Selections[selector]
		if ok && sel.Kind() == types.MethodVal {
			obj := sel.Obj().(*types.Func)
			recv := recvType(obj)
			wantAddr := isPointer(recv)
			escaping := true
			v := b.receiver(fn, selector.X, wantAddr, escaping, sel, selector)
			if isInterface(recv) {
				// Invoke-mode call.
				c.Value = v
				c.Method = obj
			} else {
				// "Call"-mode call.
				c.Value = fn.Prog.declaredFunc(obj)
				c.Args = append(c.Args, v)
			}
			return
		}

		// sel.Kind()==MethodExpr indicates T.f() or (*T).f():
		// a statically dispatched call to the method f in the
		// method-set of T or *T. T may be an interface.
		//
		// e.Fun would evaluate to a concrete method, interface
		// wrapper function, or promotion wrapper.
		//
		// For now, we evaluate it in the usual way.
		//
		// TODO(adonovan): opt: inline expr() here, to make the
		// call static and to avoid generation of wrappers.
		// It's somewhat tricky as it may consume the first
		// actual parameter if the call is "invoke" mode.
		//
		// Examples:
		//  type T struct{}; func (T) f() {} // "call" mode
		//  type T interface { f() }         // "invoke" mode
		//
		//  type S struct{ T }
		//
		//  var s S
		//  S.f(s)
		//  (*S).f(&s)
		//
		// Suggested approach:
		// - consume the first actual parameter expression
		//   and build it with b.expr().
		// - apply implicit field selections.
		// - use MethodVal logic to populate fields of c.
	}

	// Evaluate the function operand in the usual way.
	c.Value = b.expr(fn, e.Fun)
}

// emitCallArgs emits to f code for the actual parameters of call e to
// a (possibly built-in) function of effective type sig.
// The argument values are appended to args, which is then returned.
//
func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
	// f(x, y, z...): pass slice z straight through.
	if e.Ellipsis != 0 {
		for i, arg := range e.Args {
			v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type(), arg)
			args = append(args, v)
		}
		return args
	}

	offset := len(args) // 1 if call has receiver, 0 otherwise

	// Evaluate actual parameter expressions.
	//
	// If this is a chained call of the form f(g()) where g has
	// multiple return values (MRV), they are flattened out into
	// args; a suffix of them may end up in a varargs slice.
	for _, arg := range e.Args {
		v := b.expr(fn, arg)
		if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain
			for i, n := 0, ttuple.Len(); i < n; i++ {
				args = append(args, emitExtract(fn, v, i, arg))
			}
		} else {
			args = append(args, v)
		}
	}

	// Actual->formal assignability conversions for normal parameters.
	np := sig.Params().Len() // number of normal parameters
	if sig.Variadic() {
		np--
	}
	for i := 0; i < np; i++ {
		args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type(), args[offset+i].Source())
	}

	// Actual->formal assignability conversions for variadic parameter,
	// and construction of slice.
	if sig.Variadic() {
		varargs := args[offset+np:]
		st := sig.Params().At(np).Type().(*types.Slice)
		vt := st.Elem()
		if len(varargs) == 0 {
			// No variadic arguments: pass a nil slice.
			args = append(args, emitConst(fn, nilConst(st)))
		} else {
			// Replace a suffix of args with a slice containing it.
			at := types.NewArray(vt, int64(len(varargs)))
			a := emitNew(fn, at, e)
			a.source = e
			for i, arg := range varargs {
				iaddr := &IndexAddr{
					X:     a,
					Index: emitConst(fn, intConst(int64(i))),
				}
				iaddr.setType(types.NewPointer(vt))
				fn.emit(iaddr, e)
				emitStore(fn, iaddr, arg, arg.Source())
			}
			s := &Slice{X: a}
			s.setType(st)
			args[offset+np] = fn.emit(s, args[offset+np].Source())
			args = args[:offset+np+1]
		}
	}
	return args
}

// setCall emits to fn code to evaluate all the parameters of a function
// call e, and populates *c with those values.
//
func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
	// First deal with the f(...) part and optional receiver.
	b.setCallFunc(fn, e, c)

	// Then append the other actual parameters.
	sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature)
	if sig == nil {
		panic(fmt.Sprintf("no signature for call of %s", e.Fun))
	}
	c.Args = b.emitCallArgs(fn, sig, e, c.Args)
}

// assignOp emits to fn code to perform loc <op>= val.
func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, source ast.Node) {
	// Load the old value, apply op, and store the result back.
	oldv := loc.load(fn, source)
	loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type(), source), loc.typ(), source), source)
}

// localValueSpec emits to fn code to define all of the vars in the
// function-local ValueSpec, spec.
//
func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
	switch {
	case len(spec.Values) == len(spec.Names):
		// e.g. var x, y = 0, 1
		// 1:1 assignment
		for i, id := range spec.Names {
			if !isBlankIdent(id) {
				fn.addLocalForIdent(id)
			}
			lval := b.addr(fn, id, false) // non-escaping
			b.assign(fn, lval, spec.Values[i], true, nil, spec)
		}

	case len(spec.Values) == 0:
		// e.g. var x, y int
		// Locals are implicitly zero-initialized.
		for _, id := range spec.Names {
			if !isBlankIdent(id) {
				lhs := fn.addLocalForIdent(id)
				if fn.debugInfo() {
					emitDebugRef(fn, id, lhs, true)
				}
			}
		}

	default:
		// e.g. var x, y = pos()
		// One multi-valued RHS initializes all names via Extract.
		tuple := b.exprN(fn, spec.Values[0])
		for i, id := range spec.Names {
			if !isBlankIdent(id) {
				fn.addLocalForIdent(id)
				lhs := b.addr(fn, id, false) // non-escaping
				lhs.store(fn, emitExtract(fn, tuple, i, id), id)
			}
		}
	}
}

// assignStmt emits code to fn for a parallel assignment of rhss to lhss.
// isDef is true if this is a short variable declaration (:=).
//
// Note the similarity with localValueSpec.
//
func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, source ast.Node) {
	// Side effects of all LHSs and RHSs must occur in left-to-right order.
	lvals := make([]lvalue, len(lhss))
	isZero := make([]bool, len(lhss))
	for i, lhs := range lhss {
		var lval lvalue = blank{}
		if !isBlankIdent(lhs) {
			if isDef {
				if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil {
					fn.addNamedLocal(obj, lhs)
					isZero[i] = true
				}
			}
			lval = b.addr(fn, lhs, false) // non-escaping
		}
		lvals[i] = lval
	}
	if len(lhss) == len(rhss) {
		// Simple assignment:   x     = f()        (!isDef)
		// Parallel assignment: x, y  = f(), g()   (!isDef)
		// or short var decl:   x, y := f(), g()   (isDef)
		//
		// In all cases, the RHSs may refer to the LHSs,
		// so we need a storebuf.
		var sb storebuf
		for i := range rhss {
			b.assign(fn, lvals[i], rhss[i], isZero[i], &sb, source)
		}
		sb.emit(fn)
	} else {
		// e.g. x, y = pos()
		tuple := b.exprN(fn, rhss[0])
		emitDebugRef(fn, rhss[0], tuple, false)
		for i, lval := range lvals {
			lval.store(fn, emitExtract(fn, tuple, i, source), source)
		}
	}
}

// arrayLen returns the length of the array whose composite literal elements are elts.
// Keyed elements may set the implicit index for subsequent elements, so the
// result is one more than the largest index, keyed or implicit.
func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
	var max int64 = -1
	var i int64 = -1
	for _, e := range elts {
		if kv, ok := e.(*ast.KeyValueExpr); ok {
			i = b.expr(fn, kv.Key).(*Const).Int64()
		} else {
			i++
		}
		if i > max {
			max = i
		}
	}
	return max + 1
}

// compLit emits to fn code to initialize a composite literal e at
// address addr with type typ.
//
// Nested composite literals are recursively initialized in place
// where possible. If isZero is true, compLit assumes that addr
// holds the zero value for typ.
//
// Because the elements of a composite literal may refer to the
// variables being updated, as in the second line below,
//	x := T{a: 1}
//	x = T{a: x.a}
// all the reads must occur before all the writes.
// Thus all stores to
// loc are emitted to the storebuf sb for later execution.
//
// A CompositeLit may have pointer type only in the recursive (nested)
// case when the type name is implicit. e.g. in []*T{{}}, the inner
// literal has type *T behaves like &T{}.
// In that case, addr must hold a T, not a *T.
//
func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
	typ := deref(fn.Pkg.typeOf(e))
	switch t := typ.Underlying().(type) {
	case *types.Struct:
		if !isZero && len(e.Elts) != t.NumFields() {
			// memclear
			sb.store(&address{addr, nil}, zeroValue(fn, deref(addr.Type()), e), e)
			isZero = true
		}
		for i, e := range e.Elts {
			fieldIndex := i
			if kv, ok := e.(*ast.KeyValueExpr); ok {
				// Keyed element: resolve the field name to its index.
				fname := kv.Key.(*ast.Ident).Name
				for i, n := 0, t.NumFields(); i < n; i++ {
					sf := t.Field(i)
					if sf.Name() == fname {
						fieldIndex = i
						e = kv.Value
						break
					}
				}
			}
			sf := t.Field(fieldIndex)
			faddr := &FieldAddr{
				X:     addr,
				Field: fieldIndex,
			}
			faddr.setType(types.NewPointer(sf.Type()))
			fn.emit(faddr, e)
			b.assign(fn, &address{addr: faddr, expr: e}, e, isZero, sb, e)
		}

	case *types.Array, *types.Slice:
		var at *types.Array
		var array Value
		switch t := t.(type) {
		case *types.Slice:
			// Allocate a fresh backing array for the slice literal.
			at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
			alloc := emitNew(fn, at, e)
			array = alloc
		case *types.Array:
			at = t
			array = addr

			if !isZero && int64(len(e.Elts)) != at.Len() {
				// memclear
				sb.store(&address{array, nil}, zeroValue(fn, deref(array.Type()), e), e)
			}
		}

		var idx *Const
		for _, e := range e.Elts {
			if kv, ok := e.(*ast.KeyValueExpr); ok {
				idx = b.expr(fn, kv.Key).(*Const)
				e = kv.Value
			} else {
				// Implicit index: one past the previous element's index.
				var idxval int64
				if idx != nil {
					idxval = idx.Int64() + 1
				}
1114 idx = emitConst(fn, intConst(idxval)) 1115 } 1116 iaddr := &IndexAddr{ 1117 X: array, 1118 Index: idx, 1119 } 1120 iaddr.setType(types.NewPointer(at.Elem())) 1121 fn.emit(iaddr, e) 1122 if t != at { // slice 1123 // backing array is unaliased => storebuf not needed. 1124 b.assign(fn, &address{addr: iaddr, expr: e}, e, true, nil, e) 1125 } else { 1126 b.assign(fn, &address{addr: iaddr, expr: e}, e, true, sb, e) 1127 } 1128 } 1129 1130 if t != at { // slice 1131 s := &Slice{X: array} 1132 s.setType(typ) 1133 sb.store(&address{addr: addr, expr: e}, fn.emit(s, e), e) 1134 } 1135 1136 case *types.Map: 1137 m := &MakeMap{Reserve: emitConst(fn, intConst(int64(len(e.Elts))))} 1138 m.setType(typ) 1139 fn.emit(m, e) 1140 for _, e := range e.Elts { 1141 e := e.(*ast.KeyValueExpr) 1142 1143 // If a key expression in a map literal is itself a 1144 // composite literal, the type may be omitted. 1145 // For example: 1146 // map[*struct{}]bool{{}: true} 1147 // An &-operation may be implied: 1148 // map[*struct{}]bool{&struct{}{}: true} 1149 var key Value 1150 if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { 1151 // A CompositeLit never evaluates to a pointer, 1152 // so if the type of the location is a pointer, 1153 // an &-operation is implied. 1154 key = b.addr(fn, e.Key, true).address(fn) 1155 } else { 1156 key = b.expr(fn, e.Key) 1157 } 1158 1159 loc := element{ 1160 m: m, 1161 k: emitConv(fn, key, t.Key(), e), 1162 t: t.Elem(), 1163 } 1164 1165 // We call assign() only because it takes care 1166 // of any &-operation required in the recursive 1167 // case, e.g., 1168 // map[int]*struct{}{0: {}} implies &struct{}{}. 1169 // In-place update is of course impossible, 1170 // and no storebuf is needed. 
1171 b.assign(fn, &loc, e.Value, true, nil, e) 1172 } 1173 sb.store(&address{addr: addr, expr: e}, m, e) 1174 1175 default: 1176 panic("unexpected CompositeLit type: " + t.String()) 1177 } 1178} 1179 1180func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { 1181 if s.Tag == nil { 1182 b.switchStmtDynamic(fn, s, label) 1183 return 1184 } 1185 dynamic := false 1186 for _, iclause := range s.Body.List { 1187 clause := iclause.(*ast.CaseClause) 1188 for _, cond := range clause.List { 1189 if fn.Pkg.info.Types[unparen(cond)].Value == nil { 1190 dynamic = true 1191 break 1192 } 1193 } 1194 } 1195 1196 if dynamic { 1197 b.switchStmtDynamic(fn, s, label) 1198 return 1199 } 1200 1201 if s.Init != nil { 1202 b.stmt(fn, s.Init) 1203 } 1204 1205 entry := fn.currentBlock 1206 tag := b.expr(fn, s.Tag) 1207 1208 heads := make([]*BasicBlock, 0, len(s.Body.List)) 1209 bodies := make([]*BasicBlock, len(s.Body.List)) 1210 conds := make([]Value, 0, len(s.Body.List)) 1211 1212 hasDefault := false 1213 done := fn.newBasicBlock(fmt.Sprintf("switch.done")) 1214 if label != nil { 1215 label._break = done 1216 } 1217 for i, stmt := range s.Body.List { 1218 body := fn.newBasicBlock(fmt.Sprintf("switch.body.%d", i)) 1219 bodies[i] = body 1220 cas := stmt.(*ast.CaseClause) 1221 if cas.List == nil { 1222 // default branch 1223 hasDefault = true 1224 head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d", i)) 1225 conds = append(conds, nil) 1226 heads = append(heads, head) 1227 fn.currentBlock = head 1228 emitJump(fn, body, cas) 1229 } 1230 for j, cond := range stmt.(*ast.CaseClause).List { 1231 fn.currentBlock = entry 1232 head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d.%d", i, j)) 1233 conds = append(conds, b.expr(fn, cond)) 1234 heads = append(heads, head) 1235 fn.currentBlock = head 1236 emitJump(fn, body, cond) 1237 } 1238 } 1239 1240 for i, stmt := range s.Body.List { 1241 clause := stmt.(*ast.CaseClause) 1242 body := bodies[i] 1243 fn.currentBlock = body 1244 
fallthru := done 1245 if i+1 < len(bodies) { 1246 fallthru = bodies[i+1] 1247 } 1248 fn.targets = &targets{ 1249 tail: fn.targets, 1250 _break: done, 1251 _fallthrough: fallthru, 1252 } 1253 b.stmtList(fn, clause.Body) 1254 fn.targets = fn.targets.tail 1255 emitJump(fn, done, stmt) 1256 } 1257 1258 if !hasDefault { 1259 head := fn.newBasicBlock(fmt.Sprintf("switch.head.implicit-default")) 1260 body := fn.newBasicBlock("switch.body.implicit-default") 1261 fn.currentBlock = head 1262 emitJump(fn, body, s) 1263 fn.currentBlock = body 1264 emitJump(fn, done, s) 1265 heads = append(heads, head) 1266 conds = append(conds, nil) 1267 } 1268 1269 if len(heads) != len(conds) { 1270 panic(fmt.Sprintf("internal error: %d heads for %d conds", len(heads), len(conds))) 1271 } 1272 for _, head := range heads { 1273 addEdge(entry, head) 1274 } 1275 fn.currentBlock = entry 1276 entry.emit(&ConstantSwitch{ 1277 Tag: tag, 1278 Conds: conds, 1279 }, s) 1280 fn.currentBlock = done 1281} 1282 1283// switchStmt emits to fn code for the switch statement s, optionally 1284// labelled by label. 1285// 1286func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblock) { 1287 // We treat SwitchStmt like a sequential if-else chain. 1288 // Multiway dispatch can be recovered later by irutil.Switches() 1289 // to those cases that are free of side effects. 1290 if s.Init != nil { 1291 b.stmt(fn, s.Init) 1292 } 1293 kTrue := emitConst(fn, NewConst(constant.MakeBool(true), tBool)) 1294 1295 var tagv Value = kTrue 1296 var tagSource ast.Node = s 1297 if s.Tag != nil { 1298 tagv = b.expr(fn, s.Tag) 1299 tagSource = s.Tag 1300 } 1301 // lifting only considers loads and stores, but we want different 1302 // sigma nodes for the different comparisons. use a temporary and 1303 // load it in every branch. 
1304 tag := fn.addLocal(tagv.Type(), tagSource) 1305 emitStore(fn, tag, tagv, tagSource) 1306 1307 done := fn.newBasicBlock("switch.done") 1308 if label != nil { 1309 label._break = done 1310 } 1311 // We pull the default case (if present) down to the end. 1312 // But each fallthrough label must point to the next 1313 // body block in source order, so we preallocate a 1314 // body block (fallthru) for the next case. 1315 // Unfortunately this makes for a confusing block order. 1316 var dfltBody *[]ast.Stmt 1317 var dfltFallthrough *BasicBlock 1318 var fallthru, dfltBlock *BasicBlock 1319 ncases := len(s.Body.List) 1320 for i, clause := range s.Body.List { 1321 body := fallthru 1322 if body == nil { 1323 body = fn.newBasicBlock("switch.body") // first case only 1324 } 1325 1326 // Preallocate body block for the next case. 1327 fallthru = done 1328 if i+1 < ncases { 1329 fallthru = fn.newBasicBlock("switch.body") 1330 } 1331 1332 cc := clause.(*ast.CaseClause) 1333 if cc.List == nil { 1334 // Default case. 1335 dfltBody = &cc.Body 1336 dfltFallthrough = fallthru 1337 dfltBlock = body 1338 continue 1339 } 1340 1341 var nextCond *BasicBlock 1342 for _, cond := range cc.List { 1343 nextCond = fn.newBasicBlock("switch.next") 1344 if tagv == kTrue { 1345 // emit a proper if/else chain instead of a comparison 1346 // of a value against true. 1347 // 1348 // NOTE(dh): adonovan had a todo saying "don't forget 1349 // conversions though". As far as I can tell, there 1350 // aren't any conversions that we need to take care of 1351 // here. `case bool(a) && bool(b)` as well as `case 1352 // bool(a && b)` are being taken care of by b.cond, 1353 // and `case a` where a is not of type bool is 1354 // invalid. 
1355 b.cond(fn, cond, body, nextCond) 1356 } else { 1357 cond := emitCompare(fn, token.EQL, emitLoad(fn, tag, cond), b.expr(fn, cond), cond) 1358 emitIf(fn, cond, body, nextCond, cond.Source()) 1359 } 1360 1361 fn.currentBlock = nextCond 1362 } 1363 fn.currentBlock = body 1364 fn.targets = &targets{ 1365 tail: fn.targets, 1366 _break: done, 1367 _fallthrough: fallthru, 1368 } 1369 b.stmtList(fn, cc.Body) 1370 fn.targets = fn.targets.tail 1371 emitJump(fn, done, s) 1372 fn.currentBlock = nextCond 1373 } 1374 if dfltBlock != nil { 1375 // The lack of a Source for the jump doesn't matter, block 1376 // fusing will get rid of the jump later. 1377 1378 emitJump(fn, dfltBlock, s) 1379 fn.currentBlock = dfltBlock 1380 fn.targets = &targets{ 1381 tail: fn.targets, 1382 _break: done, 1383 _fallthrough: dfltFallthrough, 1384 } 1385 b.stmtList(fn, *dfltBody) 1386 fn.targets = fn.targets.tail 1387 } 1388 emitJump(fn, done, s) 1389 fn.currentBlock = done 1390} 1391 1392func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { 1393 if s.Init != nil { 1394 b.stmt(fn, s.Init) 1395 } 1396 1397 var tag Value 1398 switch e := s.Assign.(type) { 1399 case *ast.ExprStmt: // x.(type) 1400 tag = b.expr(fn, unparen(e.X).(*ast.TypeAssertExpr).X) 1401 case *ast.AssignStmt: // y := x.(type) 1402 tag = b.expr(fn, unparen(e.Rhs[0]).(*ast.TypeAssertExpr).X) 1403 default: 1404 panic("unreachable") 1405 } 1406 tagPtr := fn.addLocal(tag.Type(), tag.Source()) 1407 emitStore(fn, tagPtr, tag, tag.Source()) 1408 1409 // +1 in case there's no explicit default case 1410 heads := make([]*BasicBlock, 0, len(s.Body.List)+1) 1411 1412 entry := fn.currentBlock 1413 done := fn.newBasicBlock("done") 1414 if label != nil { 1415 label._break = done 1416 } 1417 1418 // set up type switch and constant switch, populate their conditions 1419 tswtch := &TypeSwitch{ 1420 Tag: emitLoad(fn, tagPtr, tag.Source()), 1421 Conds: make([]types.Type, 0, len(s.Body.List)+1), 1422 } 1423 cswtch := 
&ConstantSwitch{ 1424 Conds: make([]Value, 0, len(s.Body.List)+1), 1425 } 1426 1427 rets := make([]types.Type, 0, len(s.Body.List)+1) 1428 index := 0 1429 var default_ *ast.CaseClause 1430 for _, clause := range s.Body.List { 1431 cc := clause.(*ast.CaseClause) 1432 if obj := fn.Pkg.info.Implicits[cc]; obj != nil { 1433 fn.addNamedLocal(obj, cc) 1434 } 1435 if cc.List == nil { 1436 // default case 1437 default_ = cc 1438 } else { 1439 for _, expr := range cc.List { 1440 tswtch.Conds = append(tswtch.Conds, fn.Pkg.typeOf(expr)) 1441 cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(index)))) 1442 index++ 1443 } 1444 if len(cc.List) == 1 { 1445 rets = append(rets, fn.Pkg.typeOf(cc.List[0])) 1446 } else { 1447 for range cc.List { 1448 rets = append(rets, tag.Type()) 1449 } 1450 } 1451 } 1452 } 1453 1454 // default branch 1455 rets = append(rets, tag.Type()) 1456 1457 var vars []*types.Var 1458 vars = append(vars, varIndex) 1459 for _, typ := range rets { 1460 vars = append(vars, anonVar(typ)) 1461 } 1462 tswtch.setType(types.NewTuple(vars...)) 1463 // default branch 1464 fn.currentBlock = entry 1465 fn.emit(tswtch, s) 1466 cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(-1)))) 1467 // in theory we should add a local and stores/loads for tswtch, to 1468 // generate sigma nodes in the branches. however, there isn't any 1469 // useful information we could possibly attach to it. 
	cswtch.Tag = emitExtract(fn, tswtch, 0, s)
	fn.emit(cswtch, s)

	// build heads and bodies
	index = 0
	for _, clause := range s.Body.List {
		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			continue
		}

		body := fn.newBasicBlock("typeswitch.body")
		// One head per listed type; each head stores the implicit
		// variable (if any) and jumps to the shared clause body.
		for _, expr := range cc.List {
			head := fn.newBasicBlock("typeswitch.head")
			heads = append(heads, head)
			fn.currentBlock = head

			if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
				// In a switch y := x.(type), each case clause
				// implicitly declares a distinct object y.
				// In a single-type case, y has that type.
				// In multi-type cases, 'case nil' and default,
				// y has the same type as the interface operand.

				l := fn.objects[obj]
				if rets[index] == tUntypedNil {
					emitStore(fn, l, emitConst(fn, nilConst(tswtch.Tag.Type())), s.Assign)
				} else {
					x := emitExtract(fn, tswtch, index+1, s.Assign)
					emitStore(fn, l, x, nil)
				}
			}

			emitJump(fn, body, expr)
			index++
		}
		fn.currentBlock = body
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		b.stmtList(fn, cc.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done, clause)
	}

	if default_ == nil {
		// implicit default: the switch falls straight through to done.
		heads = append(heads, done)
	} else {
		body := fn.newBasicBlock("typeswitch.default")
		heads = append(heads, body)
		fn.currentBlock = body
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		if obj := fn.Pkg.info.Implicits[default_]; obj != nil {
			l := fn.objects[obj]
			x := emitExtract(fn, tswtch, index+1, s.Assign)
			emitStore(fn, l, x, s)
		}
		b.stmtList(fn, default_.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done, s)
	}

	fn.currentBlock = entry
	for _, head := range heads {
		addEdge(entry, head)
	}
	fn.currentBlock = done
}

// selectStmt emits to fn code for the select statement s, optionally
// labelled by label.
//
func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (noreturn bool) {
	// An empty select blocks forever; emit an Unreachable after it.
	if len(s.Body.List) == 0 {
		instr := &Select{Blocking: true}
		instr.setType(types.NewTuple(varIndex, varOk))
		fn.emit(instr, s)
		fn.emit(new(Unreachable), s)
		addEdge(fn.currentBlock, fn.Exit)
		return true
	}

	// A blocking select of a single case degenerates to a
	// simple send or receive.
	// TODO(adonovan): opt: is this optimization worth its weight?
	if len(s.Body.List) == 1 {
		clause := s.Body.List[0].(*ast.CommClause)
		if clause.Comm != nil {
			b.stmt(fn, clause.Comm)
			done := fn.newBasicBlock("select.done")
			if label != nil {
				label._break = done
			}
			fn.targets = &targets{
				tail:   fn.targets,
				_break: done,
			}
			b.stmtList(fn, clause.Body)
			fn.targets = fn.targets.tail
			emitJump(fn, done, clause)
			fn.currentBlock = done
			return false
		}
	}

	// First evaluate all channels in all cases, and find
	// the directions of each state.
	// Collect a SelectState for each non-default communication clause;
	// a default clause makes the select non-blocking.
	var states []*SelectState
	blocking := true
	debugInfo := fn.debugInfo()
	for _, clause := range s.Body.List {
		var st *SelectState
		switch comm := clause.(*ast.CommClause).Comm.(type) {
		case nil: // default case
			blocking = false
			continue

		case *ast.SendStmt: // ch<- i
			ch := b.expr(fn, comm.Chan)
			st = &SelectState{
				Dir:  types.SendOnly,
				Chan: ch,
				Send: emitConv(fn, b.expr(fn, comm.Value),
					ch.Type().Underlying().(*types.Chan).Elem(), comm),
				Pos: comm.Arrow,
			}
			if debugInfo {
				st.DebugNode = comm
			}

		case *ast.AssignStmt: // x := <-ch
			recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr)
			st = &SelectState{
				Dir:  types.RecvOnly,
				Chan: b.expr(fn, recv.X),
				Pos:  recv.OpPos,
			}
			if debugInfo {
				st.DebugNode = recv
			}

		case *ast.ExprStmt: // <-ch
			recv := unparen(comm.X).(*ast.UnaryExpr)
			st = &SelectState{
				Dir:  types.RecvOnly,
				Chan: b.expr(fn, recv.X),
				Pos:  recv.OpPos,
			}
			if debugInfo {
				st.DebugNode = recv
			}
		}
		states = append(states, st)
	}

	// We dispatch on the (fair) result of Select using a
	// switch on the returned index.
	sel := &Select{
		States:   states,
		Blocking: blocking,
	}
	sel.source = s
	// The Select tuple is (index, ok, recv0, recv1, ...): one extra
	// component per receive-direction state.
	var vars []*types.Var
	vars = append(vars, varIndex, varOk)
	for _, st := range states {
		if st.Dir == types.RecvOnly {
			tElem := st.Chan.Type().Underlying().(*types.Chan).Elem()
			vars = append(vars, anonVar(tElem))
		}
	}
	sel.setType(types.NewTuple(vars...))
	fn.emit(sel, s)
	idx := emitExtract(fn, sel, 0, s)

	done := fn.newBasicBlock("select.done")
	if label != nil {
		label._break = done
	}

	entry := fn.currentBlock
	swtch := &ConstantSwitch{
		Tag: idx,
		// one condition per case
		Conds: make([]Value, 0, len(s.Body.List)+1),
	}
	// note that we don't need heads; a select case can only have a single condition
	var bodies []*BasicBlock

	state := 0
	r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
	for _, cc := range s.Body.List {
		clause := cc.(*ast.CommClause)
		if clause.Comm == nil {
			body := fn.newBasicBlock("select.default")
			fn.currentBlock = body
			bodies = append(bodies, body)
			fn.targets = &targets{
				tail:   fn.targets,
				_break: done,
			}
			b.stmtList(fn, clause.Body)
			emitJump(fn, done, s)
			fn.targets = fn.targets.tail
			swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(-1)))
			continue
		}
		swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(int64(state))))
		body := fn.newBasicBlock("select.body")
		fn.currentBlock = body
		bodies = append(bodies, body)
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		switch comm := clause.Comm.(type) {
		case *ast.ExprStmt: // <-ch
			if debugInfo {
				v := emitExtract(fn, sel, r, comm)
				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
			}
			r++

		case *ast.AssignStmt: // x := <-states[state].Chan
			if comm.Tok == token.DEFINE {
				fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident))
			}
			x := b.addr(fn, comm.Lhs[0], false) // non-escaping
			v := emitExtract(fn, sel, r, comm)
			if debugInfo {
				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
			}
			x.store(fn, v, comm)

			if len(comm.Lhs) == 2 { // x, ok := ...
				if comm.Tok == token.DEFINE {
					fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident))
				}
				ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
				ok.store(fn, emitExtract(fn, sel, 1, comm), comm)
			}
			r++
		}
		b.stmtList(fn, clause.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done, s)
		state++
	}
	fn.currentBlock = entry
	fn.emit(swtch, s)
	for _, body := range bodies {
		addEdge(entry, body)
	}
	fn.currentBlock = done
	return false
}

// forStmt emits to fn code for the for statement s, optionally
// labelled by label.
//
func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
	// ...init...
	// jump loop
	// loop:
	//      if cond goto body else done
	// body:
	//      ...body...
	//      jump post
	// post:                                 (target of continue)
	//      ...post...
	//      jump loop
	// done:                                 (target of break)
	if s.Init != nil {
		b.stmt(fn, s.Init)
	}
	body := fn.newBasicBlock("for.body")
	done := fn.newBasicBlock("for.done") // target of 'break'
	loop := body                         // target of back-edge
	if s.Cond != nil {
		loop = fn.newBasicBlock("for.loop")
	}
	cont := loop // target of 'continue'
	if s.Post != nil {
		cont = fn.newBasicBlock("for.post")
	}
	if label != nil {
		label._break = done
		label._continue = cont
	}
	emitJump(fn, loop, s)
	fn.currentBlock = loop
	if loop != body {
		b.cond(fn, s.Cond, body, done)
		fn.currentBlock = body
	}
	fn.targets = &targets{
		tail:      fn.targets,
		_break:    done,
		_continue: cont,
	}
	b.stmt(fn, s.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, cont, s)

	if s.Post != nil {
		fn.currentBlock = cont
		b.stmt(fn, s.Post)
		emitJump(fn, loop, s) // back-edge
	}
	fn.currentBlock = done
}

// rangeIndexed emits to fn the header for an integer-indexed loop
// over array, *array or slice value x.
// The v result is defined only if tv is non-nil.
// source is the AST node to which emitted instructions are attributed.
//
func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) {
	//
	//      length = len(x)
	//      index = -1
	// loop:                                   (target of continue)
	//      index++
	//      if index < length goto body else done
	// body:
	//      k = index
	//      v = x[index]
	//      ...body...
	//      jump loop
	// done:                                   (target of break)

	// Determine number of iterations.
	var length Value
	if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok {
		// For array or *array, the number of iterations is
		// known statically thanks to the type. We avoid a
		// data dependence upon x, permitting later dead-code
		// elimination if x is pure, static unrolling, etc.
		// Ranging over a nil *array may have >0 iterations.
		// We still generate code for x, in case it has effects.
		length = emitConst(fn, intConst(arr.Len()))
	} else {
		// length = len(x).
		var c Call
		c.Call.Value = makeLen(x.Type())
		c.Call.Args = []Value{x}
		c.setType(tInt)
		length = fn.emit(&c, source)
	}

	index := fn.addLocal(tInt, source)
	emitStore(fn, index, emitConst(fn, intConst(-1)), source)

	loop = fn.newBasicBlock("rangeindex.loop")
	emitJump(fn, loop, source)
	fn.currentBlock = loop

	incr := &BinOp{
		Op: token.ADD,
		X:  emitLoad(fn, index, source),
		Y:  emitConst(fn, intConst(1)),
	}
	incr.setType(tInt)
	emitStore(fn, index, fn.emit(incr, source), source)

	body := fn.newBasicBlock("rangeindex.body")
	done = fn.newBasicBlock("rangeindex.done")
	emitIf(fn, emitCompare(fn, token.LSS, incr, length, source), body, done, source)
	fn.currentBlock = body

	k = emitLoad(fn, index, source)
	if tv != nil {
		switch t := x.Type().Underlying().(type) {
		case *types.Array:
			// Array value: index it directly.
			instr := &Index{
				X:     x,
				Index: k,
			}
			instr.setType(t.Elem())
			v = fn.emit(instr, source)

		case *types.Pointer: // *array
			instr := &IndexAddr{
				X:     x,
				Index: k,
			}
			instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()))
			v = emitLoad(fn, fn.emit(instr, source), source)

		case *types.Slice:
			instr := &IndexAddr{
				X:     x,
				Index: k,
			}
			instr.setType(types.NewPointer(t.Elem()))
			v = emitLoad(fn, fn.emit(instr, source), source)

		default:
			panic("rangeIndexed x:" + t.String())
		}
	}
	return
}

// rangeIter emits to fn the header for a loop using
// Range/Next/Extract to iterate over map or string value x.
// tk and tv are the types of the key/value results k and v, or nil
// if the respective component is not wanted.
//
func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) {
	//
	//      it = range x
	// loop:                                   (target of continue)
	//      okv = next it                      (ok, key, value)
	//      ok = extract okv #0
	//      if ok goto body else done
	// body:
	//      k = extract okv #1
	//      v = extract okv #2
	//      ...body...
	//      jump loop
	// done:                                   (target of break)
	//

	if tk == nil {
		tk = tInvalid
	}
	if tv == nil {
		tv = tInvalid
	}

	rng := &Range{X: x}
	rng.setType(tRangeIter)
	it := fn.emit(rng, source)

	loop = fn.newBasicBlock("rangeiter.loop")
	emitJump(fn, loop, source)
	fn.currentBlock = loop

	// A basic underlying type here can only be a string.
	_, isString := x.Type().Underlying().(*types.Basic)

	okv := &Next{
		Iter:     it,
		IsString: isString,
	}
	okv.setType(types.NewTuple(
		varOk,
		newVar("k", tk),
		newVar("v", tv),
	))
	fn.emit(okv, source)

	body := fn.newBasicBlock("rangeiter.body")
	done = fn.newBasicBlock("rangeiter.done")
	emitIf(fn, emitExtract(fn, okv, 0, source), body, done, source)
	fn.currentBlock = body

	if tk != tInvalid {
		k = emitExtract(fn, okv, 1, source)
	}
	if tv != tInvalid {
		v = emitExtract(fn, okv, 2, source)
	}
	return
}

// rangeChan emits to fn the header for a loop that receives from
// channel x until it fails.
// tk is the channel's element type, or nil if the k result is
// not wanted.
// source is the AST node to which emitted instructions are attributed.
1945// 1946func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Node) (k Value, loop, done *BasicBlock) { 1947 // 1948 // loop: (target of continue) 1949 // ko = <-x (key, ok) 1950 // ok = extract ko #1 1951 // if ok goto body else done 1952 // body: 1953 // k = extract ko #0 1954 // ... 1955 // goto loop 1956 // done: (target of break) 1957 1958 loop = fn.newBasicBlock("rangechan.loop") 1959 emitJump(fn, loop, source) 1960 fn.currentBlock = loop 1961 retv := emitRecv(fn, x, true, types.NewTuple(newVar("k", x.Type().Underlying().(*types.Chan).Elem()), varOk), source) 1962 body := fn.newBasicBlock("rangechan.body") 1963 done = fn.newBasicBlock("rangechan.done") 1964 emitIf(fn, emitExtract(fn, retv, 1, source), body, done, source) 1965 fn.currentBlock = body 1966 if tk != nil { 1967 k = emitExtract(fn, retv, 0, source) 1968 } 1969 return 1970} 1971 1972// rangeStmt emits to fn code for the range statement s, optionally 1973// labelled by label. 1974// 1975func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, source ast.Node) { 1976 var tk, tv types.Type 1977 if s.Key != nil && !isBlankIdent(s.Key) { 1978 tk = fn.Pkg.typeOf(s.Key) 1979 } 1980 if s.Value != nil && !isBlankIdent(s.Value) { 1981 tv = fn.Pkg.typeOf(s.Value) 1982 } 1983 1984 // If iteration variables are defined (:=), this 1985 // occurs once outside the loop. 1986 // 1987 // Unlike a short variable declaration, a RangeStmt 1988 // using := never redeclares an existing variable; it 1989 // always creates a new one. 
1990 if s.Tok == token.DEFINE { 1991 if tk != nil { 1992 fn.addLocalForIdent(s.Key.(*ast.Ident)) 1993 } 1994 if tv != nil { 1995 fn.addLocalForIdent(s.Value.(*ast.Ident)) 1996 } 1997 } 1998 1999 x := b.expr(fn, s.X) 2000 2001 var k, v Value 2002 var loop, done *BasicBlock 2003 switch rt := x.Type().Underlying().(type) { 2004 case *types.Slice, *types.Array, *types.Pointer: // *array 2005 k, v, loop, done = b.rangeIndexed(fn, x, tv, source) 2006 2007 case *types.Chan: 2008 k, loop, done = b.rangeChan(fn, x, tk, source) 2009 2010 case *types.Map, *types.Basic: // string 2011 k, v, loop, done = b.rangeIter(fn, x, tk, tv, source) 2012 2013 default: 2014 panic("Cannot range over: " + rt.String()) 2015 } 2016 2017 // Evaluate both LHS expressions before we update either. 2018 var kl, vl lvalue 2019 if tk != nil { 2020 kl = b.addr(fn, s.Key, false) // non-escaping 2021 } 2022 if tv != nil { 2023 vl = b.addr(fn, s.Value, false) // non-escaping 2024 } 2025 if tk != nil { 2026 kl.store(fn, k, s) 2027 } 2028 if tv != nil { 2029 vl.store(fn, v, s) 2030 } 2031 2032 if label != nil { 2033 label._break = done 2034 label._continue = loop 2035 } 2036 2037 fn.targets = &targets{ 2038 tail: fn.targets, 2039 _break: done, 2040 _continue: loop, 2041 } 2042 b.stmt(fn, s.Body) 2043 fn.targets = fn.targets.tail 2044 emitJump(fn, loop, source) // back-edge 2045 fn.currentBlock = done 2046} 2047 2048// stmt lowers statement s to IR form, emitting code to fn. 2049func (b *builder) stmt(fn *Function, _s ast.Stmt) { 2050 // The label of the current statement. If non-nil, its _goto 2051 // target is always set; its _break and _continue are set only 2052 // within the body of switch/typeswitch/select/for/range. 2053 // It is effectively an additional default-nil parameter of stmt(). 2054 var label *lblock 2055start: 2056 switch s := _s.(type) { 2057 case *ast.EmptyStmt: 2058 // ignore. (Usually removed by gofmt.) 

	case *ast.DeclStmt: // Con, Var or Typ
		// Only local variable declarations emit code.
		d := s.Decl.(*ast.GenDecl)
		if d.Tok == token.VAR {
			for _, spec := range d.Specs {
				if vs, ok := spec.(*ast.ValueSpec); ok {
					b.localValueSpec(fn, vs)
				}
			}
		}

	case *ast.LabeledStmt:
		label = fn.labelledBlock(s.Label)
		emitJump(fn, label._goto, s)
		fn.currentBlock = label._goto
		_s = s.Stmt
		goto start // effectively: tailcall stmt(fn, s.Stmt, label)

	case *ast.ExprStmt:
		b.expr(fn, s.X)

	case *ast.SendStmt:
		instr := &Send{
			Chan: b.expr(fn, s.Chan),
			X: emitConv(fn, b.expr(fn, s.Value),
				fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem(), s),
		}
		fn.emit(instr, s)

	case *ast.IncDecStmt:
		// x++ / x-- lower to x += 1 / x -= 1.
		op := token.ADD
		if s.Tok == token.DEC {
			op = token.SUB
		}
		loc := b.addr(fn, s.X, false)
		b.assignOp(fn, loc, emitConst(fn, NewConst(constant.MakeInt64(1), loc.typ())), op, s)

	case *ast.AssignStmt:
		switch s.Tok {
		case token.ASSIGN, token.DEFINE:
			b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE, _s)

		default: // +=, etc.
			op := s.Tok + token.ADD - token.ADD_ASSIGN
			b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s)
		}

	case *ast.GoStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Go{}
		b.setCall(fn, s.Call, &v.Call)
		fn.emit(&v, s)

	case *ast.DeferStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Defer{}
		b.setCall(fn, s.Call, &v.Call)
		fn.hasDefer = true
		fn.emit(&v, s)

	case *ast.ReturnStmt:
		// TODO(dh): we could emit tighter position information by
		// using the ith returned expression

		var results []Value
		if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
			// Return of one expression in a multi-valued function.
			tuple := b.exprN(fn, s.Results[0])
			ttuple := tuple.Type().(*types.Tuple)
			for i, n := 0, ttuple.Len(); i < n; i++ {
				results = append(results,
					emitConv(fn, emitExtract(fn, tuple, i, s),
						fn.Signature.Results().At(i).Type(), s))
			}
		} else {
			// 1:1 return, or no-arg return in non-void function.
			for i, r := range s.Results {
				v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type(), s)
				results = append(results, v)
			}
		}

		ret := fn.results()
		for i, r := range results {
			emitStore(fn, ret[i], r, s)
		}

		emitJump(fn, fn.Exit, s)
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BranchStmt:
		// Resolve the jump target: labelled blocks for labelled
		// branches, otherwise the innermost enclosing targets frame
		// that defines the relevant target.
		var block *BasicBlock
		switch s.Tok {
		case token.BREAK:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._break
			} else {
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._break
				}
			}

		case token.CONTINUE:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._continue
			} else {
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._continue
				}
			}

		case token.FALLTHROUGH:
			for t := fn.targets; t != nil && block == nil; t = t.tail {
				block = t._fallthrough
			}

		case token.GOTO:
			block = fn.labelledBlock(s.Label)._goto
		}
		j := emitJump(fn, block, s)
		j.Comment = s.Tok.String()
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BlockStmt:
		b.stmtList(fn, s.List)

	case *ast.IfStmt:
		if s.Init != nil {
			b.stmt(fn, s.Init)
		}
		then := fn.newBasicBlock("if.then")
		done := fn.newBasicBlock("if.done")
		els := done
		if s.Else != nil {
			els = fn.newBasicBlock("if.else")
		}
		instr := b.cond(fn, s.Cond, then, els)
		instr.source = s
		fn.currentBlock = then
		b.stmt(fn, s.Body)
		emitJump(fn, done, s)

		if s.Else != nil {
			fn.currentBlock = els
			b.stmt(fn, s.Else)
			emitJump(fn, done, s)
		}

		fn.currentBlock = done

	case *ast.SwitchStmt:
		b.switchStmt(fn, s, label)

	case *ast.TypeSwitchStmt:
		b.typeSwitchStmt(fn, s, label)

	case *ast.SelectStmt:
		if b.selectStmt(fn, s, label) {
			// the select has no cases, it blocks forever
			fn.currentBlock = fn.newBasicBlock("unreachable")
		}

	case *ast.ForStmt:
		b.forStmt(fn, s, label)

	case *ast.RangeStmt:
		b.rangeStmt(fn, s, label, s)

	default:
		panic(fmt.Sprintf("unexpected statement kind: %T", s))
	}
}

// buildFunction builds IR code for the body of function fn.  Idempotent.
func (b *builder) buildFunction(fn *Function) {
	if fn.Blocks != nil {
		return // building already started
	}

	var recvField *ast.FieldList
	var body *ast.BlockStmt
	var functype *ast.FuncType
	switch n := fn.source.(type) {
	case nil:
		return // not a Go source function.  (Synthetic, or from object file.)
	case *ast.FuncDecl:
		functype = n.Type
		recvField = n.Recv
		body = n.Body
	case *ast.FuncLit:
		functype = n.Type
		body = n.Body
	default:
		panic(n)
	}

	if fn.Package().Pkg.Path() == "syscall" && fn.Name() == "Exit" {
		// syscall.Exit is a stub and the way os.Exit terminates the
		// process. Note that there are other functions in the runtime
		// that also terminate or unwind that we cannot analyze.
2261 // However, they aren't stubs, so buildExits ends up getting 2262 // called on them, so that's where we handle those special 2263 // cases. 2264 fn.WillExit = true 2265 } 2266 2267 if body == nil { 2268 // External function. 2269 if fn.Params == nil { 2270 // This condition ensures we add a non-empty 2271 // params list once only, but we may attempt 2272 // the degenerate empty case repeatedly. 2273 // TODO(adonovan): opt: don't do that. 2274 2275 // We set Function.Params even though there is no body 2276 // code to reference them. This simplifies clients. 2277 if recv := fn.Signature.Recv(); recv != nil { 2278 // XXX synthesize an ast.Node 2279 fn.addParamObj(recv, nil) 2280 } 2281 params := fn.Signature.Params() 2282 for i, n := 0, params.Len(); i < n; i++ { 2283 // XXX synthesize an ast.Node 2284 fn.addParamObj(params.At(i), nil) 2285 } 2286 } 2287 return 2288 } 2289 if fn.Prog.mode&LogSource != 0 { 2290 defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.Pos()))() 2291 } 2292 fn.blocksets = b.blocksets 2293 fn.startBody() 2294 fn.createSyntacticParams(recvField, functype) 2295 fn.exitBlock() 2296 b.stmt(fn, body) 2297 if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb.Preds != nil) { 2298 // Control fell off the end of the function's body block. 2299 // 2300 // Block optimizations eliminate the current block, if 2301 // unreachable. It is a builder invariant that 2302 // if this no-arg return is ill-typed for 2303 // fn.Signature.Results, this block must be 2304 // unreachable. The sanity checker checks this. 2305 // fn.emit(new(RunDefers)) 2306 // fn.emit(new(Return)) 2307 emitJump(fn, fn.Exit, nil) 2308 } 2309 optimizeBlocks(fn) 2310 buildFakeExits(fn) 2311 b.buildExits(fn) 2312 b.addUnreachables(fn) 2313 fn.finishBody() 2314 b.blocksets = fn.blocksets 2315 fn.functionBody = nil 2316} 2317 2318// buildFuncDecl builds IR code for the function or method declared 2319// by decl in package pkg. 
//
func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
	id := decl.Name
	if isBlankIdent(id) {
		return // discard
	}
	fn := pkg.values[pkg.info.Defs[id]].(*Function)
	if decl.Recv == nil && id.Name == "init" {
		// Each package-level init function is invoked from the
		// synthetic package init(); emit that call here.
		var v Call
		v.Call.Value = fn
		v.setType(types.NewTuple())
		pkg.init.emit(&v, decl)
	}
	fn.source = decl
	b.buildFunction(fn)
}

// Build calls Package.Build for each package in prog.
//
// Build is intended for whole-program analysis; a typical compiler
// need only build a single package.
//
// Build is idempotent and thread-safe.
//
func (prog *Program) Build() {
	for _, p := range prog.packages {
		p.Build()
	}
}

// Build builds IR code for all functions and vars in package p.
//
// Precondition: CreatePackage must have been called for all of p's
// direct imports (and hence its direct imports must have been
// error-free).
//
// Build is idempotent and thread-safe.
//
func (p *Package) Build() { p.buildOnce.Do(p.build) }

// build does the real work of Package.Build: it synthesizes the
// package's init() function (guard flag, imported-package inits,
// package-level variable initializers) and then builds every declared
// function and method. Called at most once, via p.buildOnce.
func (p *Package) build() {
	if p.info == nil {
		return // synthetic package, e.g. "testmain"
	}

	// Ensure we have runtime type info for all exported members.
	// TODO(adonovan): ideally belongs in memberFromObject, but
	// that would require package creation in topological order.
	for name, mem := range p.Members {
		if ast.IsExported(name) {
			p.Prog.needMethodsOf(mem.Type())
		}
	}
	if p.Prog.mode&LogSource != 0 {
		defer logStack("build %s", p)()
	}
	init := p.init
	init.startBody()
	init.exitBlock()

	var done *BasicBlock

	// Make init() skip if package is already initialized.
	initguard := p.Var("init$guard")
	doinit := init.newBasicBlock("init.start")
	done = init.Exit
	emitIf(init, emitLoad(init, initguard, nil), done, doinit, nil)
	init.currentBlock = doinit
	emitStore(init, initguard, emitConst(init, NewConst(constant.MakeBool(true), tBool)), nil)

	// Call the init() function of each package we import.
	for _, pkg := range p.Pkg.Imports() {
		prereq := p.Prog.packages[pkg]
		if prereq == nil {
			panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
		}
		var v Call
		v.Call.Value = prereq.init
		v.setType(types.NewTuple())
		init.emit(&v, nil)
	}

	b := builder{
		printFunc: p.printFunc,
	}

	// Initialize package-level vars in correct order, as determined
	// by go/types (Info.InitOrder).
	for _, varinit := range p.info.InitOrder {
		if init.Prog.mode&LogSource != 0 {
			fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
				varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
		}
		if len(varinit.Lhs) == 1 {
			// 1:1 initialization: var x, y = a(), b()
			var lval lvalue
			if v := varinit.Lhs[0]; v.Name() != "_" {
				lval = &address{addr: p.values[v].(*Global)}
			} else {
				// Blank var: evaluate the RHS for effect only.
				lval = blank{}
			}
			// TODO(dh): do emit position information
			b.assign(init, lval, varinit.Rhs, true, nil, nil)
		} else {
			// n:1 initialization: var x, y = f()
			tuple := b.exprN(init, varinit.Rhs)
			for i, v := range varinit.Lhs {
				if v.Name() == "_" {
					continue
				}
				emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i, nil), nil)
			}
		}
	}

	// Build all package-level functions, init functions
	// and methods, including unreachable/blank ones.
	// We build them in source order, but it's not significant.
	for _, file := range p.files {
		for _, decl := range file.Decls {
			if decl, ok := decl.(*ast.FuncDecl); ok {
				b.buildFuncDecl(p, decl)
			}
		}
	}

	// Finish up init().
	emitJump(init, done, nil)
	init.finishBody()

	p.info = nil // We no longer need ASTs or go/types deductions.

	if p.Prog.mode&SanityCheckFunctions != 0 {
		sanityCheckPackage(p)
	}
}

// Like ObjectOf, but panics instead of returning nil.
// Only valid during p's create and build phases (p.info is nil after).
func (p *Package) objectOf(id *ast.Ident) types.Object {
	if o := p.info.ObjectOf(id); o != nil {
		return o
	}
	panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
		id.Name, p.Prog.Fset.Position(id.Pos())))
}

// Like TypeOf, but panics instead of returning nil.
// Only valid during p's create and build phases (p.info is nil after).
func (p *Package) typeOf(e ast.Expr) types.Type {
	if T := p.info.TypeOf(e); T != nil {
		return T
	}
	panic(fmt.Sprintf("no type for %T @ %s",
		e, p.Prog.Fset.Position(e.Pos())))
}