Source file src/cmd/compile/internal/walk/walk.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"fmt"
	"internal/abi"
	"internal/buildcfg"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/rttype"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/src"
)
// tmpstringbufsize is the size of the temporary stack buffer used for
// string conversions. The constant is known to the runtime; it matches the
// runtime's tmpStringBufSize.
const tmpstringbufsize = 32

func Walk(fn *ir.Func) {
	ir.CurFunc = fn

	// Set and then clear a package-level cache of static values for this fn.
	// (At some point, it might be worthwhile to have a walkState structure
	// that gets passed everywhere where things like this can go.)
	staticValues = findStaticValues(fn)
	defer func() { staticValues = nil }()

	errorsBefore := base.Errors()
	order(fn)
	if base.Errors() > errorsBefore {
		return
	}

	if base.Flag.W != 0 {
		s := fmt.Sprintf("\nbefore walk %v", ir.CurFunc.Sym())
		ir.DumpList(s, ir.CurFunc.Body)
	}

	walkStmtList(ir.CurFunc.Body)
	if base.Flag.W != 0 {
		s := fmt.Sprintf("after walk %v", ir.CurFunc.Sym())
		ir.DumpList(s, ir.CurFunc.Body)
	}

	// Eagerly compute sizes of all variables for SSA.
	for _, n := range fn.Dcl {
		types.CalcSize(n.Type())
	}
}

// walkRecv walks an ORECV node.
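// The receive is used only for its side effect: a statement such as "<-c"
// is lowered to a call to chanrecv1(c, nil), discarding the received value.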
func walkRecv(n *ir.UnaryExpr) ir.Node {
	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}
	init := ir.TakeInit(n)

	n.X = walkExpr(n.X, &init)
	call := walkExpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, typecheck.NodNil()), &init)
	return ir.InitExpr(init, call)
}

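// convas ensures that both sides of the assignment n have identical types,
// converting and walking the RHS as needed and appending any side effects
// to init. Assignments to blank only need a default type for the RHS.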
func convas(n *ir.AssignStmt, init *ir.Nodes) *ir.AssignStmt {
	if n.Op() != ir.OAS {
		base.Fatalf("convas: not OAS %v", n.Op())
	}
	n.SetTypecheck(1)

	if n.X == nil || n.Y == nil {
		return n
	}

	lt := n.X.Type()
	rt := n.Y.Type()
	if lt == nil || rt == nil {
		return n
	}

	if ir.IsBlank(n.X) {
		n.Y = typecheck.DefaultLit(n.Y, nil)
		return n
	}

	if !types.Identical(lt, rt) {
		n.Y = typecheck.AssignConv(n.Y, lt, "assignment")
		n.Y = walkExpr(n.Y, init)
	}
	types.CalcSize(n.Y.Type())

	return n
}

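// vmkcall constructs a call to fn with arguments va and result type t,
// then walks it, appending any side effects to init. fn must be a function
// with exactly len(va) parameters.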
func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) *ir.CallExpr {
	if init == nil {
		base.Fatalf("mkcall with nil init: %v", fn)
	}
	if fn.Type() == nil || fn.Type().Kind() != types.TFUNC {
		base.Fatalf("mkcall %v %v", fn, fn.Type())
	}

	n := fn.Type().NumParams()
	if n != len(va) {
		base.Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
	}

	call := typecheck.Call(base.Pos, fn, va, false).(*ir.CallExpr)
	call.SetType(t)
	return walkExpr(call, init).(*ir.CallExpr)
}

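// mkcall and mkcall1 build a call to a runtime function, given by name or as
// an already-resolved node respectively, with result type t, appending any
// side effects to init. mkcallstmt and mkcallstmt1 are the statement forms,
// for runtime calls whose results are not used.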
func mkcall(name string, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(typecheck.LookupRuntime(name), t, init, args)
}

func mkcallstmt(name string, args ...ir.Node) ir.Node {
	return mkcallstmt1(typecheck.LookupRuntime(name), args...)
}

func mkcall1(fn ir.Node, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(fn, t, init, args)
}

func mkcallstmt1(fn ir.Node, args ...ir.Node) ir.Node {
	var init ir.Nodes
	n := vmkcall(fn, nil, &init, args)
	if len(init) == 0 {
		return n
	}
	init.Append(n)
	return ir.NewBlockStmt(n.Pos(), init)
}

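// chanfn returns the runtime function with the given name, instantiated for
// channel type t. n is the number of type arguments in the runtime
// declaration, each of which is filled with t's element type.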
func chanfn(name string, n int, t *types.Type) ir.Node {
	if !t.IsChan() {
		base.Fatalf("chanfn %v", t)
	}
	switch n {
	case 1:
		return typecheck.LookupRuntime(name, t.Elem())
	case 2:
		return typecheck.LookupRuntime(name, t.Elem(), t.Elem())
	}
	base.Fatalf("chanfn %d", n)
	return nil
}

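// mapfn returns the runtime map function with the given name, instantiated
// for map type t. The slow-path and "fat" variants take the key by address,
// so they need one more type argument than the fast variants.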
func mapfn(name string, t *types.Type, isfat bool) ir.Node {
	if !t.IsMap() {
		base.Fatalf("mapfn %v", t)
	}
	if mapfast(t) == mapslow || isfat {
		return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key(), t.Elem())
	}
	return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Elem())
}

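// mapfndel returns the runtime map delete function with the given name,
// instantiated for map type t. As in mapfn, the slow path takes the key by
// address and needs an extra type argument.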
func mapfndel(name string, t *types.Type) ir.Node {
	if !t.IsMap() {
		base.Fatalf("mapfn %v", t)
	}
	if mapfast(t) == mapslow {
		return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key())
	}
	return typecheck.LookupRuntime(name, t.Key(), t.Elem())
}

const (
	mapslow = iota
	mapfast32
	mapfast32ptr
	mapfast64
	mapfast64ptr
	mapfaststr
	nmapfast
)

type mapnames [nmapfast]string

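// mkmapnames returns the runtime function names for one map operation,
// indexed by the mapfast* constants above. For example,
// mkmapnames("mapassign", "ptr") yields {"mapassign", "mapassign_fast32",
// "mapassign_fast32ptr", "mapassign_fast64", "mapassign_fast64ptr",
// "mapassign_faststr"}.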
func mkmapnames(base string, ptr string) mapnames {
	return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
}

var mapaccess1 = mkmapnames("mapaccess1", "")
var mapaccess2 = mkmapnames("mapaccess2", "")
var mapassign = mkmapnames("mapassign", "ptr")
var mapdelete = mkmapnames("mapdelete", "")

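// mapfast classifies map type t, returning the mapfast* code that selects
// which specialized runtime map routine, if any, can be used for t.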
func mapfast(t *types.Type) int {
	if buildcfg.Experiment.SwissMap {
		return mapfastSwiss(t)
	}
	return mapfastOld(t)
}

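// mapfastSwiss classifies t for the Swiss-table map implementation. Fast
// variants exist only for small enough elements with 32-bit, 64-bit, or
// string keys; word-sized pointer keys use the *ptr variants.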
func mapfastSwiss(t *types.Type) int {
	if t.Elem().Size() > abi.SwissMapMaxElemBytes {
		return mapslow
	}
	switch reflectdata.AlgType(t.Key()) {
	case types.AMEM32:
		if !t.Key().HasPointers() {
			return mapfast32
		}
		if types.PtrSize == 4 {
			return mapfast32ptr
		}
		base.Fatalf("small pointer %v", t.Key())
	case types.AMEM64:
		if !t.Key().HasPointers() {
			return mapfast64
		}
		if types.PtrSize == 8 {
			return mapfast64ptr
		}
		// Two-word object, at least one of which is a pointer.
		// Use the slow path.
	case types.ASTRING:
		return mapfaststr
	}
	return mapslow
}

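// mapfastOld classifies t for the original bucket-based map implementation,
// using the same criteria as mapfastSwiss.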
func mapfastOld(t *types.Type) int {
	if t.Elem().Size() > abi.OldMapMaxElemBytes {
		return mapslow
	}
	switch reflectdata.AlgType(t.Key()) {
	case types.AMEM32:
		if !t.Key().HasPointers() {
			return mapfast32
		}
		if types.PtrSize == 4 {
			return mapfast32ptr
		}
		base.Fatalf("small pointer %v", t.Key())
	case types.AMEM64:
		if !t.Key().HasPointers() {
			return mapfast64
		}
		if types.PtrSize == 8 {
			return mapfast64ptr
		}
		// Two-word object, at least one of which is a pointer.
		// Use the slow path.
	case types.ASTRING:
		return mapfaststr
	}
	return mapslow
}

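// walkAppendArgs walks the arguments of an append call and forces each one
// into a cheap expression, so that it can be safely evaluated and reused
// while the append is being rewritten.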
func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
	walkExprListSafe(n.Args, init)

	// walkExprListSafe will leave OINDEX (s[n]) alone if both s
	// and n are name or literal, but those may index the slice we're
	// modifying here. Fix explicitly.
	ls := n.Args
	for i1, n1 := range ls {
		ls[i1] = cheapExpr(n1, init)
	}
}

// appendWalkStmt typechecks and walks stmt and then appends it to init.
func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
	op := stmt.Op()
	n := typecheck.Stmt(stmt)
	if op == ir.OAS || op == ir.OAS2 {
		// If the assignment has side effects, walkExpr will append them
		// directly to init for us, while walkStmt will wrap it in an OBLOCK.
		// We need to append them directly.
		// TODO(rsc): Clean this up.
		n = walkExpr(n, init)
	} else {
		n = walkStmt(n)
	}
	init.Append(n)
}

// maxOpenDefers is the maximum number of defers allowed in a function that
// uses open-coded defers. We enforce this limit because the deferBits
// bitmask is currently a single byte (to minimize code size).
const maxOpenDefers = 8

// backingArrayPtrLen extracts the pointer and length from a slice or string.
// This constructs two nodes referring to n, so n must be a cheapExpr.
func backingArrayPtrLen(n ir.Node) (ptr, length ir.Node) {
	var init ir.Nodes
	c := cheapExpr(n, &init)
	if c != n || len(init) != 0 {
		base.Fatalf("backingArrayPtrLen not cheap: %v", n)
	}
	ptr = ir.NewUnaryExpr(base.Pos, ir.OSPTR, n)
	if n.Type().IsString() {
		ptr.SetType(types.Types[types.TUINT8].PtrTo())
	} else {
		ptr.SetType(n.Type().Elem().PtrTo())
	}
	ptr.SetTypecheck(1)
	length = ir.NewUnaryExpr(base.Pos, ir.OLEN, n)
	length.SetType(types.Types[types.TINT])
	length.SetTypecheck(1)
	return ptr, length
}

// mayCall reports whether evaluating expression n may require
// function calls, which could clobber function call arguments/results
// currently on the stack.
func mayCall(n ir.Node) bool {
	// When instrumenting, any expression might require function calls.
	if base.Flag.Cfg.Instrumenting {
		return true
	}

	isSoftFloat := func(typ *types.Type) bool {
		return types.IsFloat[typ.Kind()] || types.IsComplex[typ.Kind()]
	}

	return ir.Any(n, func(n ir.Node) bool {
		// walk should have already moved any Init blocks off of
		// expressions.
		if len(n.Init()) != 0 {
			base.FatalfAt(n.Pos(), "mayCall %+v", n)
		}

		switch n.Op() {
		default:
			base.FatalfAt(n.Pos(), "mayCall %+v", n)

		case ir.OCALLFUNC, ir.OCALLINTER,
			ir.OUNSAFEADD, ir.OUNSAFESLICE:
			return true

		case ir.OINDEX, ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR,
			ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODYNAMICDOTTYPE, ir.ODIV, ir.OMOD,
			ir.OSLICE2ARR, ir.OSLICE2ARRPTR:
			// These ops might panic, make sure they are done
			// before we start marshaling args for a call. See issue 16760.
			return true

		case ir.OANDAND, ir.OOROR:
			n := n.(*ir.LogicalExpr)
			// The RHS expression may have init statements that
			// should only execute conditionally, and so cannot be
			// pulled out to the top-level init list. We could try
			// to be more precise here.
			return len(n.Y.Init()) != 0

		// When using soft-float, these ops might be rewritten to function calls
		// so we ensure they are evaluated first.
		case ir.OADD, ir.OSUB, ir.OMUL, ir.ONEG:
			return ssagen.Arch.SoftFloat && isSoftFloat(n.Type())
		case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
			n := n.(*ir.BinaryExpr)
			return ssagen.Arch.SoftFloat && isSoftFloat(n.X.Type())
		case ir.OCONV:
			n := n.(*ir.ConvExpr)
			return ssagen.Arch.SoftFloat && (isSoftFloat(n.Type()) || isSoftFloat(n.X.Type()))

		case ir.OMIN, ir.OMAX:
			// string or float requires runtime call, see (*ssagen.state).minmax method.
			return n.Type().IsString() || n.Type().IsFloat()

		case ir.OLITERAL, ir.ONIL, ir.ONAME, ir.OLINKSYMOFFSET, ir.OMETHEXPR,
			ir.OAND, ir.OANDNOT, ir.OLSH, ir.OOR, ir.ORSH, ir.OXOR, ir.OCOMPLEX, ir.OMAKEFACE,
			ir.OADDR, ir.OBITNOT, ir.ONOT, ir.OPLUS,
			ir.OCAP, ir.OIMAG, ir.OLEN, ir.OREAL,
			ir.OCONVNOP, ir.ODOT,
			ir.OCFUNC, ir.OIDATA, ir.OITAB, ir.OSPTR,
			ir.OBYTES2STRTMP, ir.OGETG, ir.OGETCALLERSP, ir.OSLICEHEADER, ir.OSTRINGHEADER:
			// ok: operations that don't require function calls.
			// Expand as needed.
		}

		return false
	})
}

// itabType loads the _type field from a runtime.itab struct.
func itabType(itab ir.Node) ir.Node {
	if itabTypeField == nil {
		// internal/abi.ITab's Type field
		itabTypeField = runtimeField("Type", rttype.ITab.OffsetOf("Type"), types.NewPtr(types.Types[types.TUINT8]))
	}
	return boundedDotPtr(base.Pos, itab, itabTypeField)
}

var itabTypeField *types.Field

// boundedDotPtr returns a selector expression representing ptr.field
// and omits nil-pointer checks for ptr.
func boundedDotPtr(pos src.XPos, ptr ir.Node, field *types.Field) *ir.SelectorExpr {
	sel := ir.NewSelectorExpr(pos, ir.ODOTPTR, ptr, field.Sym)
	sel.Selection = field
	sel.SetType(field.Type)
	sel.SetTypecheck(1)
	sel.SetBounded(true) // guaranteed not to fault
	return sel
}

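// runtimeField constructs a field descriptor named in the runtime package
// with the given offset and type, for describing fields of runtime structs
// (such as internal/abi.ITab) that the compiler accesses directly.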
func runtimeField(name string, offset int64, typ *types.Type) *types.Field {
	f := types.NewField(src.NoXPos, ir.Pkgs.Runtime.Lookup(name), typ)
	f.Offset = offset
	return f
}

// ifaceData loads the data field from an interface.
// The concrete type must be known to have type t.
// It follows the pointer if !IsDirectIface(t).
func ifaceData(pos src.XPos, n ir.Node, t *types.Type) ir.Node {
	if t.IsInterface() {
		base.Fatalf("ifaceData interface: %v", t)
	}
	ptr := ir.NewUnaryExpr(pos, ir.OIDATA, n)
	if types.IsDirectIface(t) {
		ptr.SetType(t)
		ptr.SetTypecheck(1)
		return ptr
	}
	ptr.SetType(types.NewPtr(t))
	ptr.SetTypecheck(1)
	ind := ir.NewStarExpr(pos, ptr)
	ind.SetType(t)
	ind.SetTypecheck(1)
	ind.SetBounded(true)
	return ind
}

// staticValue returns the earliest expression it can find that always
// evaluates to n, with similar semantics to [ir.StaticValue].
//
// It only returns results for the ir.CurFunc being processed in [Walk],
// including its closures, and uses a cache to reduce duplicative work.
// It can return n or nil if it does not find an earlier expression.
//
// The current use case is reducing OCONVIFACE allocations, and hence
// staticValue is currently only useful when given an *ir.ConvExpr.X as n.
func staticValue(n ir.Node) ir.Node {
	if staticValues == nil {
		base.Fatalf("staticValues is nil. staticValue called outside of walk.Walk?")
	}
	return staticValues[n]
}

// staticValues is a cache of static values for use by staticValue.
var staticValues map[ir.Node]ir.Node

// findStaticValues returns a map of static values for fn.
func findStaticValues(fn *ir.Func) map[ir.Node]ir.Node {
	// We can't use an ir.ReassignOracle or ir.StaticValue in the
	// middle of walk because they don't currently handle
	// transformed assignments (e.g., will complain about 'RHS == nil').
	// So we instead build this map to use in walk.
	ro := &ir.ReassignOracle{}
	ro.Init(fn)
	m := make(map[ir.Node]ir.Node)
	ir.Visit(fn, func(n ir.Node) {
		if n.Op() == ir.OCONVIFACE {
			x := n.(*ir.ConvExpr).X
			v := ro.StaticValue(x)
			if v != nil && v != x {
				m[x] = v
			}
		}
	})
	return m
}
