package escape

import (
	"fmt"
	"go/constant"
	"go/token"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/src"
)

// Escape analysis.
//
// Here we analyze functions to determine which Go values (including
// implicitly allocated ones, such as closures and the results of
// "make" and "new") can be kept on the stack and which must escape
// to the heap.
//
// There are two key invariants to preserve: (1) a pointer to a stack
// object cannot be stored in the heap, and (2) a pointer to a stack
// object cannot outlive that object (for example, because the
// declaring function returned and destroyed the object's stack
// frame, or because its space is reused across a loop iteration for
// logically distinct variables).
//
// The analysis is a static data-flow analysis over the IR. First, we
// construct a directed weighted graph where vertices ("locations")
// represent variables allocated by statements and expressions, and
// edges represent assignments between variables (with weights
// representing addressing/dereference counts).
//
// Next, we walk the graph looking for assignment paths that might
// violate the invariants above. If a variable v's address is stored
// in the heap or anywhere else that may outlive it, then v is marked
// as requiring heap allocation.
//
// To support interprocedural analysis, we also record data flow from
// each function's parameters to the heap and to its result
// parameters. This information is summarized as "parameter tags",
// which are used at static call sites to improve escape analysis of
// function arguments.

// A batch holds escape analysis state that is shared across an
// entire batch of functions being analyzed at once.
type batch struct {
	allLocs         []*location
	closures        []closure
	reassignOracles map[*ir.Func]*ir.ReassignOracle

	heapLoc    location
	mutatorLoc location
	calleeLoc  location
	blankLoc   location
}

// A closure holds a closure expression together with the hole it
// flows into, so its free variables can be flowed after all function
// bodies in the batch have been walked.
type closure struct {
	k   hole
	clo *ir.ClosureExpr
}

// An escape holds state specific to a single function being analyzed
// within a batch.
type escape struct {
	*batch

	curfn *ir.Func // function being analyzed

	labels map[*types.Sym]labelState // known labels

	// loopDepth counts the current loop nesting depth within
	// curfn. It increments within each "for" loop and at each
	// label with a corresponding backwards "goto" (i.e., an
	// unstructured loop).
	loopDepth int
}
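// Funcs performs escape analysis on the functions in all, visiting
// them bottom-up so that mutually recursive groups are analyzed
// together in a single batch.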
func Funcs(all []*ir.Func) {
	// Cache of lazily constructed ir.ReassignOracles, shared across
	// all of the batches analyzed below.
	reassignOracles := make(map[*ir.Func]*ir.ReassignOracle)

	ir.VisitFuncsBottomUp(all, func(list []*ir.Func, recursive bool) {
		Batch(list, reassignOracles)
	})
}

// Batch performs escape analysis on a minimal batch of functions.
func Batch(fns []*ir.Func, reassignOracles map[*ir.Func]*ir.ReassignOracle) {
	var b batch
	b.heapLoc.attrs = attrEscapes | attrPersists | attrMutates | attrCalls
	b.mutatorLoc.attrs = attrMutates
	b.calleeLoc.attrs = attrCalls
	b.reassignOracles = reassignOracles

	// Construct the data-flow graph from the function bodies.
	for _, fn := range fns {
		if base.Flag.W > 1 {
			s := fmt.Sprintf("\nbefore escape %v", fn)
			ir.Dump(s, fn)
		}
		b.initFunc(fn)
	}
	for _, fn := range fns {
		if !fn.IsClosure() {
			b.walkFunc(fn)
		}
	}

	// We've walked the function bodies, so we've seen everywhere a
	// variable might be reassigned or have its address taken. Now we
	// can decide whether closures should capture their free variables
	// by value or by reference.
	for _, closure := range b.closures {
		b.flowClosure(closure.k, closure.clo)
	}
	b.closures = nil

	for _, loc := range b.allLocs {
		// Try to replace some non-constant expressions with literals.
		b.rewriteWithLiterals(loc.n, loc.curfn)

		// If the location must be heap allocated regardless of the
		// analysis (e.g., a too-large OMAKESLICE), flow it to the heap now.
		if why := HeapAllocReason(loc.n); why != "" {
			b.flow(b.heapHole().addr(loc.n, why), loc)
		}
	}

	b.walkAll()
	b.finish(fns)
}
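// with returns an escape state for analyzing fn as part of batch b,
// starting at loop depth 1.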
func (b *batch) with(fn *ir.Func) *escape {
	return &escape{
		batch:     b,
		curfn:     fn,
		loopDepth: 1,
	}
}
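// initFunc marks fn as planned for analysis, allocates data-flow
// graph locations for its declared variables, and records the index
// of each result parameter.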
func (b *batch) initFunc(fn *ir.Func) {
	e := b.with(fn)
	if fn.Esc() != escFuncUnknown {
		base.Fatalf("unexpected node: %v", fn)
	}
	fn.SetEsc(escFuncPlanned)
	if base.Flag.LowerM > 3 {
		ir.Dump("escAnalyze", fn)
	}

	// Allocate locations for local variables.
	for _, n := range fn.Dcl {
		e.newLoc(n, true)
	}

	// If fn isn't itself a closure, also allocate locations for the
	// canonical forms of its closure variables.
	if fn.OClosure == nil {
		for _, n := range fn.ClosureVars {
			e.newLoc(n.Canonical(), true)
		}
	}

	// Initialize resultIndex for result parameters.
	for i, f := range fn.Type().Results() {
		e.oldLoc(f.Nname.(*ir.Name)).resultIndex = 1 + i
	}
}
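// walkFunc analyzes the body of fn, first identifying which labels
// are the targets of backward gotos and therefore mark unstructured
// loops.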
func (b *batch) walkFunc(fn *ir.Func) {
	e := b.with(fn)
	fn.SetEsc(escFuncStarted)

	// Identify labels that mark the head of an unstructured loop.
	ir.Visit(fn, func(n ir.Node) {
		switch n.Op() {
		case ir.OLABEL:
			n := n.(*ir.LabelStmt)
			if n.Label.IsBlank() {
				break
			}
			if e.labels == nil {
				e.labels = make(map[*types.Sym]labelState)
			}
			e.labels[n.Label] = nonlooping

		case ir.OGOTO:
			// If we visited the label before the goto,
			// then this is a looping label.
			n := n.(*ir.BranchStmt)
			if e.labels[n.Label] == nonlooping {
				e.labels[n.Label] = looping
			}
		}
	})

	e.block(fn.Body)

	if len(e.labels) != 0 {
		base.FatalfAt(fn.Pos(), "leftover labels after walkFunc")
	}
}
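// flowClosure decides, for each free variable of the closure clo,
// whether it is captured by value or by reference, and then flows the
// captured variables into the closure's hole k.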
func (b *batch) flowClosure(k hole, clo *ir.ClosureExpr) {
	for _, cv := range clo.Func.ClosureVars {
		n := cv.Canonical()
		loc := b.oldLoc(cv)
		if !loc.captured {
			base.FatalfAt(cv.Pos(), "closure variable never captured: %v", cv)
		}

		// Capture by value for variables <= 128 bytes that are never
		// reassigned and never have their address taken.
		n.SetByval(!loc.addrtaken && !loc.reassigned && n.Type().Size() <= 128)
		if !n.Byval() {
			n.SetAddrtaken(true)
			if n.Sym().Name == typecheck.LocalDictName {
				base.FatalfAt(n.Pos(), "dictionary variable not captured by value")
			}
		}

		if base.Flag.LowerM > 1 {
			how := "ref"
			if n.Byval() {
				how = "value"
			}
			base.WarnfAt(n.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", n.Curfn, how, n, loc.addrtaken, loc.reassigned, n.Type().Size())
		}

		// Flow captured variables to the closure.
		k := k
		if !cv.Byval() {
			k = k.addr(cv, "reference")
		}
		b.flow(k.note(cv, "captured by a closure"), loc)
	}
}
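// finish records the analysis results: it tags function parameters,
// sets each node's escape status (ir.EscHeap or ir.EscNone), marks
// short-lived values as transient, and emits -m diagnostics.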
func (b *batch) finish(fns []*ir.Func) {
	// Update parameter tags.
	for _, fn := range fns {
		fn.SetEsc(escFuncTagged)

		for i, param := range fn.Type().RecvParams() {
			param.Note = b.paramTag(fn, 1+i, param)
		}
	}

	for _, loc := range b.allLocs {
		n := loc.n
		if n == nil {
			continue
		}

		if n.Op() == ir.ONAME {
			n := n.(*ir.Name)
			n.Opt = nil
		}

		// Update n.Esc based on escape analysis results.

		// Omit escape diagnostics for go/defer wrappers, at least for
		// now; historically they haven't been reported, and test cases
		// don't expect them.
		goDeferWrapper := n.Op() == ir.OCLOSURE && n.(*ir.ClosureExpr).Func.Wrapper()

		if loc.hasAttr(attrEscapes) {
			if n.Op() == ir.ONAME {
				if base.Flag.CompilingRuntime {
					base.ErrorfAt(n.Pos(), 0, "%v escapes to heap, not allowed in runtime", n)
				}
				if base.Flag.LowerM != 0 {
					base.WarnfAt(n.Pos(), "moved to heap: %v", n)
				}
			} else {
				if base.Flag.LowerM != 0 && !goDeferWrapper {
					if n.Op() == ir.OAPPEND {
						base.WarnfAt(n.Pos(), "append escapes to heap")
					} else {
						base.WarnfAt(n.Pos(), "%v escapes to heap", n)
					}
				}
				if logopt.Enabled() {
					var e_curfn *ir.Func // the enclosing function is not tracked here; always nil
					logopt.LogOpt(n.Pos(), "escape", "escape", ir.FuncName(e_curfn))
				}
			}
			n.SetEsc(ir.EscHeap)
		} else {
			if base.Flag.LowerM != 0 && n.Op() != ir.ONAME && !goDeferWrapper {
				if n.Op() == ir.OAPPEND {
					base.WarnfAt(n.Pos(), "append does not escape")
				} else {
					base.WarnfAt(n.Pos(), "%v does not escape", n)
				}
			}
			n.SetEsc(ir.EscNone)
			if !loc.hasAttr(attrPersists) {
				switch n.Op() {
				case ir.OCLOSURE:
					n := n.(*ir.ClosureExpr)
					n.SetTransient(true)
				case ir.OMETHVALUE:
					n := n.(*ir.SelectorExpr)
					n.SetTransient(true)
				case ir.OSLICELIT:
					n := n.(*ir.CompLitExpr)
					n.SetTransient(true)
				}
			}
		}

		// If the result of a string->[]byte conversion is never
		// mutated, it can reuse the string's memory directly instead
		// of making a copy.
		if base.Debug.ZeroCopy != 0 {
			if n, ok := n.(*ir.ConvExpr); ok && n.Op() == ir.OSTR2BYTES && !loc.hasAttr(attrMutates) {
				if base.Flag.LowerM >= 1 {
					base.WarnfAt(n.Pos(), "zero-copy string->[]byte conversion")
				}
				n.SetOp(ir.OSTR2BYTESTMP)
			}
		}
	}
}

// inMutualBatch reports whether fn is part of the batch of mutually
// recursive functions currently being analyzed. When this is true,
// fn has not yet been analyzed, so its parameters and results should
// be incorporated directly into the flow graph instead of relying on
// its escape analysis tagging.
func (b *batch) inMutualBatch(fn *ir.Name) bool {
	if fn.Defn != nil && fn.Defn.Esc() < escFuncTagged {
		if fn.Defn.Esc() == escFuncUnknown {
			base.FatalfAt(fn.Pos(), "graph inconsistency: %v", fn)
		}
		return true
	}
	return false
}

const (
	escFuncUnknown = 0 + iota
	escFuncPlanned
	escFuncStarted
	escFuncTagged
)

// Mark labels that have no backjumps to them as not increasing e.loopDepth.
type labelState int

const (
	looping labelState = 1 + iota
	nonlooping
)
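// paramTag returns the escape analysis tag to record in the Note of
// the narg'th parameter f of fn, encoding whether and how the
// parameter leaks to the heap or to result parameters.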
func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
	name := func() string {
		if f.Nname != nil {
			return f.Nname.Sym().Name
		}
		return fmt.Sprintf("arg#%d", narg)
	}

	// Only report diagnostics for user code;
	// not for wrappers generated around them.
	diagnose := base.Flag.LowerM != 0 && !(fn.Wrapper() || fn.Dupok())

	if len(fn.Body) == 0 {
		// Assume that uintptr arguments must be held live across the call.
		// This is most important for syscall.Syscall.
		// See golang.org/issue/13372.
		// This really doesn't have much to do with escape analysis per se,
		// but we are reusing the ability to annotate an individual function
		// argument and pass those annotations along to importing code.
		fn.Pragma |= ir.UintptrKeepAlive

		if f.Type.IsUintptr() {
			if diagnose {
				base.WarnfAt(f.Pos, "assuming %v is unsafe uintptr", name())
			}
			return ""
		}

		if !f.Type.HasPointers() {
			return ""
		}

		var esc leaks

		// External functions are assumed unsafe, unless
		// //go:noescape is given before the declaration.
		if fn.Pragma&ir.Noescape != 0 {
			if diagnose && f.Sym != nil {
				base.WarnfAt(f.Pos, "%v does not escape", name())
			}
			esc.AddMutator(0)
			esc.AddCallee(0)
		} else {
			if diagnose && f.Sym != nil {
				base.WarnfAt(f.Pos, "leaking param: %v", name())
			}
			esc.AddHeap(0)
		}

		return esc.Encode()
	}

	if fn.Pragma&ir.UintptrEscapes != 0 {
		if f.Type.IsUintptr() {
			if diagnose {
				base.WarnfAt(f.Pos, "marking %v as escaping uintptr", name())
			}
			return ""
		}
		if f.IsDDD() && f.Type.Elem().IsUintptr() {
			// The final argument is ...uintptr.
			if diagnose {
				base.WarnfAt(f.Pos, "marking %v as escaping ...uintptr", name())
			}
			return ""
		}
	}

	if !f.Type.HasPointers() {
		return ""
	}

	// Unnamed parameters are unused and therefore do not escape.
	if f.Sym == nil || f.Sym.IsBlank() {
		var esc leaks
		return esc.Encode()
	}

	n := f.Nname.(*ir.Name)
	loc := b.oldLoc(n)
	esc := loc.paramEsc
	esc.Optimize()

	if diagnose && !loc.hasAttr(attrEscapes) {
		b.reportLeaks(f.Pos, name(), esc, fn.Type())
	}

	return esc.Encode()
}
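// reportLeaks prints -m diagnostics describing how the parameter
// identified by name leaks, based on its computed leaks summary esc.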
func (b *batch) reportLeaks(pos src.XPos, name string, esc leaks, sig *types.Type) {
	warned := false
	if x := esc.Heap(); x >= 0 {
		if x == 0 {
			base.WarnfAt(pos, "leaking param: %v", name)
		} else {
			// Content escapes: what the parameter points to leaks,
			// but the parameter itself does not.
			base.WarnfAt(pos, "leaking param content: %v", name)
		}
		warned = true
	}
	for i := 0; i < numEscResults; i++ {
		if x := esc.Result(i); x >= 0 {
			res := sig.Result(i).Nname.Sym().Name
			base.WarnfAt(pos, "leaking param: %v to result %v level=%d", name, res, x)
			warned = true
		}
	}

	if base.Debug.EscapeMutationsCalls <= 0 {
		if !warned {
			base.WarnfAt(pos, "%v does not escape", name)
		}
		return
	}

	if x := esc.Mutator(); x >= 0 {
		base.WarnfAt(pos, "mutates param: %v derefs=%v", name, x)
		warned = true
	}
	if x := esc.Callee(); x >= 0 {
		base.WarnfAt(pos, "calls param: %v derefs=%v", name, x)
		warned = true
	}

	if !warned {
		base.WarnfAt(pos, "%v does not escape, mutate, or call", name)
	}
}

// rewriteWithLiterals attempts to replace certain non-constant
// expressions within n with literals when their value can be
// statically determined, which can enable further optimizations such
// as stack allocation.
func (b *batch) rewriteWithLiterals(n ir.Node, fn *ir.Func) {
	if n == nil || fn == nil {
		return
	}

	assignTemp := func(pos src.XPos, n ir.Node, init *ir.Nodes) {
		// Preserve any side effects of n by copying it into a temp.
		tmp := typecheck.TempAt(pos, fn, n.Type())
		init.Append(typecheck.Stmt(ir.NewDecl(pos, ir.ODCL, tmp)))
		init.Append(typecheck.Stmt(ir.NewAssignStmt(pos, tmp, n)))
	}

	switch n.Op() {
	case ir.OMAKESLICE:
		// Check if we can replace a non-constant argument to make with
		// a literal, allowing the slice to be stack allocated if otherwise possible.
		n := n.(*ir.MakeExpr)

		r := &n.Cap
		if n.Cap == nil {
			r = &n.Len
		}

		if (*r).Op() != ir.OLITERAL {
			// Look up a cached ReassignOracle for the function, lazily computing one if needed.
			ro := b.reassignOracle(fn)
			if ro == nil {
				base.Fatalf("no ReassignOracle for function %v with closure parent %v", fn, fn.ClosureParent)
			}
			if s := ro.StaticValue(*r); s.Op() == ir.OLITERAL {
				lit, ok := s.(*ir.BasicLit)
				if !ok || lit.Val().Kind() != constant.Int {
					base.Fatalf("unexpected BasicLit Kind")
				}
				if constant.Compare(lit.Val(), token.GEQ, constant.MakeInt64(0)) {
					if !base.LiteralAllocHash.MatchPos(n.Pos(), nil) {
						// De-selected by literal alloc hash.
						return
					}
					// Preserve any side effects of the original expression, then replace it.
					assignTemp(n.Pos(), *r, n.PtrInit())
					*r = ir.NewBasicLit(n.Pos(), (*r).Type(), lit.Val())
				}
			}
		}
	case ir.OCONVIFACE:
		// Check if we can replace a non-constant expression being
		// converted to an interface with a literal.
		conv := n.(*ir.ConvExpr)
		if conv.X.Op() != ir.OLITERAL && !conv.X.Type().IsInterface() {
			// Only attempt the rewrite when the operand's static value
			// can be determined to be a valid literal for its type.
			ro := b.reassignOracle(fn)
			if ro == nil {
				base.Fatalf("no ReassignOracle for function %v with closure parent %v", fn, fn.ClosureParent)
			}
			v := ro.StaticValue(conv.X)
			if v != nil && v.Op() == ir.OLITERAL && ir.ValidTypeForConst(conv.X.Type(), v.Val()) {
				if !base.LiteralAllocHash.MatchPos(n.Pos(), nil) {
					// De-selected by literal alloc hash.
					return
				}
				if base.Debug.EscapeDebug >= 3 {
					base.WarnfAt(n.Pos(), "rewriting OCONVIFACE value from %v (%v) to %v (%v)", conv.X, conv.X.Type(), v, v.Type())
				}
				// Preserve any side effects of the original expression, then replace it.
				assignTemp(conv.Pos(), conv.X, conv.PtrInit())
				v := v.(*ir.BasicLit)
				conv.X = ir.NewBasicLit(conv.Pos(), conv.X.Type(), v.Val())
				typecheck.Expr(conv)
			}
		}
	}
}

// reassignOracle returns an initialized *ir.ReassignOracle for fn.
// If fn is a closure, the oracle of its outermost non-closure parent
// is used and shared.
//
// Oracles are created lazily and cached in b.reassignOracles to avoid
// repeating the initialization work.
func (b *batch) reassignOracle(fn *ir.Func) *ir.ReassignOracle {
	if ro, ok := b.reassignOracles[fn]; ok {
		return ro
	}

	// For closures, walk up to the outermost non-closure parent and
	// use (or create) its oracle.
	f := fn
	for f.ClosureParent != nil && !f.ClosureParent.IsPackageInit() {
		f = f.ClosureParent
	}

	if f != fn {
		// Check if the parent already has an oracle.
		ro := b.reassignOracles[f]
		if ro != nil {
			// Found one; cache it for fn as well and return it.
			b.reassignOracles[fn] = ro
			return ro
		}
	}

	// Lazily create a new oracle, initialized from the outermost parent.
	ro := &ir.ReassignOracle{}
	ro.Init(f)

	// Cache the oracle for fn.
	b.reassignOracles[fn] = ro
	if f != fn {
		// Cache it for the parent too, so later closures can share it.
		b.reassignOracles[f] = ro
	}
	return ro
}