1
2
3
4
5 package ssagen
6
7 import (
8 "bufio"
9 "bytes"
10 "cmp"
11 "fmt"
12 "go/constant"
13 "html"
14 "internal/buildcfg"
15 "os"
16 "path/filepath"
17 "slices"
18 "strings"
19
20 "cmd/compile/internal/abi"
21 "cmd/compile/internal/base"
22 "cmd/compile/internal/ir"
23 "cmd/compile/internal/liveness"
24 "cmd/compile/internal/objw"
25 "cmd/compile/internal/reflectdata"
26 "cmd/compile/internal/rttype"
27 "cmd/compile/internal/ssa"
28 "cmd/compile/internal/staticdata"
29 "cmd/compile/internal/typecheck"
30 "cmd/compile/internal/types"
31 "cmd/internal/obj"
32 "cmd/internal/objabi"
33 "cmd/internal/src"
34 "cmd/internal/sys"
35
36 rtabi "internal/abi"
37 )
38
// ssaConfig is the shared SSA backend configuration, built once by InitConfig.
var ssaConfig *ssa.Config

// ssaCaches holds one ssa.Cache per concurrent backend worker (see buildssa).
var ssaCaches []ssa.Cache

var ssaDump string     // function name to dump SSA for, from $GOSSAFUNC (see InitEnv)
var ssaDir string      // optional directory for the SSA HTML dump, from $GOSSADIR
var ssaDumpStdout bool // whether to also dump to stdout ("+" suffix on $GOSSAFUNC)
var ssaDumpCFG string  // text after ":" in $GOSSAFUNC; passed to ssa.NewHTMLWriter (presumably selects CFG phases — confirm there)
const ssaDumpFile = "ssa.html"

// ssaDumpInlined records inlined functions whose sources should be shown
// alongside the dumped function (see DumpInline and dumpSourcesColumn).
var ssaDumpInlined []*ir.Func

// maxAggregatedHeapAllocation bounds the total size, in bytes, of small
// pointer-free heap allocations that newHeapaddr may merge into a single
// allocation call.
const maxAggregatedHeapAllocation = 16
55
56 func DumpInline(fn *ir.Func) {
57 if ssaDump != "" && ssaDump == ir.FuncName(fn) {
58 ssaDumpInlined = append(ssaDumpInlined, fn)
59 }
60 }
61
62 func InitEnv() {
63 ssaDump = os.Getenv("GOSSAFUNC")
64 ssaDir = os.Getenv("GOSSADIR")
65 if ssaDump != "" {
66 if strings.HasSuffix(ssaDump, "+") {
67 ssaDump = ssaDump[:len(ssaDump)-1]
68 ssaDumpStdout = true
69 }
70 spl := strings.Split(ssaDump, ":")
71 if len(spl) > 1 {
72 ssaDump = spl[0]
73 ssaDumpCFG = spl[1]
74 }
75 }
76 }
77
// InitConfig builds the package-level SSA configuration (ssaConfig) and the
// per-worker caches (ssaCaches), and resolves the runtime symbols —
// ir.Syms.*, BoundsCheckFunc, ExtendCheckFunc — that SSA construction and
// lowering refer to. It must run before any call to buildssa.
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Pre-create some pointer types while the pointer-type cache is still
	// enabled; the cache is disabled just below.
	// NOTE(review): presumably these are the pointer types needed during
	// (possibly concurrent) compilation — confirm.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	if buildcfg.Experiment.SwissMap {
		_ = types.NewPtr(reflectdata.SwissMapType()) // pointer to the swiss-map header type
	} else {
		_ = types.NewPtr(reflectdata.OldMapType()) // pointer to the legacy map header type
	}
	_ = types.NewPtr(deferstruct())
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC) // one cache per backend worker (-c)

	// Resolve the runtime functions and variables that generated SSA
	// refers to through ir.Syms.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	ir.Syms.MallocGC = typecheck.LookupRuntimeFunc("mallocgc")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.PanicBounds = typecheck.LookupRuntimeFunc("panicBounds")
	ir.Syms.PanicExtend = typecheck.LookupRuntimeFunc("panicExtend")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Loong64HasLAMCAS = typecheck.LookupRuntimeVar("loong64HasLAMCAS")
	ir.Syms.Loong64HasLAM_BH = typecheck.LookupRuntimeVar("loong64HasLAM_BH")
	ir.Syms.Loong64HasLSX = typecheck.LookupRuntimeVar("loong64HasLSX")
	ir.Syms.RISCV64HasZbb = typecheck.LookupRuntimeVar("riscv64HasZbb")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
	ir.Syms.ZeroVal = typecheck.LookupRuntimeVar("zeroVal")

	// Bounds-check panic entry points: Wasm uses the goPanic* variants,
	// every other target uses the panic* entry points.
	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	} else {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("panicSliceConvert")
	}
	// 32-bit targets additionally need the panicExtend* variants
	// (NOTE(review): presumably for indexes wider than a register — confirm).
	if Arch.LinkArch.PtrSize == 4 {
		ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
		ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
		ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
		ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
		ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
		ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
		ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
		ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
		ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
		ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
		ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
		ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
		ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
		ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
		ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
		ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
	}

	// Wasm helper symbols and sigpanic are looked up unconditionally.
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
235
// InitTables initializes the intrinsics dispatch table used during SSA
// construction (see initIntrinsics).
func InitTables() {
	initIntrinsics(nil)
}
239
240
241
242
243
244
245
246
// AbiForBodylessFuncStackMap returns the ABI for a bodyless function's stack
// map. It returns a fresh copy of ABI0; fn is currently unused.
// NOTE(review): a copy is returned, presumably so the caller may mutate it —
// confirm at call sites.
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}
250
251
252
253 func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
254 if buildcfg.Experiment.RegabiArgs {
255
256 if fn == nil {
257 return abi1
258 }
259 switch fn.ABI {
260 case obj.ABI0:
261 return abi0
262 case obj.ABIInternal:
263
264
265 return abi1
266 }
267 base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
268 panic("not reachable")
269 }
270
271 a := abi0
272 if fn != nil {
273 if fn.Pragma&ir.RegisterParams != 0 {
274 a = abi1
275 }
276 }
277 return a
278 }
279
280
281
282
283
284
285
286
287
288
289
290
// emitOpenDeferInfo records metadata about the function's open-coded defers
// in a content-addressable symbol ("<fn>.opendefer") attached to the
// function: the frame offset of the deferBits temporary followed by the
// frame offset of the first defer-closure slot, both encoded as uvarints of
// their negated values (frame offsets here are negative).
// It also verifies that the closure slots were laid out contiguously, one
// pointer apart, in defer order.
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Sanity-check that frame layout placed the defer-closure slots
	// consecutively, so a single base offset describes all of them.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	// Offsets are negated so they encode as small uvarints.
	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}
311
312
313
// buildssa builds an SSA function for fn, using the cache of the given
// worker to reduce allocation. isPgoHot marks the function as hot according
// to PGO. The returned function has been run through ssa.Compile and had its
// frame allocated.
func buildssa(fn *ir.Func, worker int, isPgoHot bool) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// Decide whether $GOSSAFUNC (ssaDump) names this function. The match
	// may be bare ("Name"), package-qualified ("pkg.Name" or a path
	// suffix "…/pkg.Name"), and may carry an ABI qualifier: a trailing
	// ",N" form, or a "<N>" form which is rewritten into the comma form.
	if strings.Contains(ssaDump, name) {
		nameOptABI := name
		if l := len(ssaDump); l > 1 && ssaDump[l-2] == ',' {
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") {
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // "Name"
			pkgDotName == ssaDump || // "pkg.Name"
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // "…/pkg.Name"
	}

	// When dumping, capture the AST of the body up front (and echo it to
	// stdout if requested) so it can be attached to the HTML writer below.
	var astBuf *bytes.Buffer
	if printssa {
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	// Decide which kinds of instrumentation (race/msan/asan) apply to this
	// function; norace pragma, ABI wrappers, and norace packages opt out.
	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate the entry block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()
	s.f.IsPgoHot = isPgoHot

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)
		// TODO(review): the MkdirAll and NewHTMLWriter errors are not
		// checked here — dump output may be silently lost.
		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Per-function maps and the initial memory state.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	// Decide whether defers in this function can be open-coded. They are
	// disabled by -N, by debug flag, on shared/dynlink 386, under
	// race-enter/exit instrumentation, when any result lives on the heap,
	// and when the exit-code blowup (returns × defers) would be too large.
	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// NOTE(review): presumably open-coded defers interact badly with
		// the shared/dynlink code generation on 386 — confirm upstream.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Racefuncexit would need to run at every defer exit path;
		// disable open-coding instead.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// All results must be on the stack so the defer-exit code can
		// see/set them directly.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Too much duplicated exit code; fall back to runtime defers.
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits temporary: an addressable uint8 whose bits
		// record which defers have been armed. It starts at zero
		// (OpConst8 with no AuxInt) and is kept live so stack scanning can
		// find it.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	var params *abi.ABIParamResultInfo
	params = s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// Record which output parameters are returned in registers, for the
	// debugger's benefit.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations (params and results).
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// Processed at each use; may not be needed at all.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate incoming parameters: SSA-able ones become OpArg values;
	// others arriving in registers are spilled to their stack slots.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v)
			} else {
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// The type is SSA-able but this particular
						// variable is not; store the whole value.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Spill register pieces individually.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Unpack closure variables from the closure pointer (context register).
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		if fn.RangeParent != nil && base.Flag.N != 0 {
			// With optimization off, spill the closure pointer to a named
			// ".closureptr" slot and keep it live
			// (NOTE(review): presumably so debuggers/range-func machinery
			// can find the parent frame — confirm).
			sym := &types.Sym{Name: ".closureptr", Pkg: types.LocalPkg}
			cloSlot := s.curfn.NewLocal(src.NoXPos, sym, s.f.Config.Types.BytePtr)
			cloSlot.SetUsed(true)
			cloSlot.SetEsc(ir.EscNever)
			cloSlot.SetAddrtaken(true)
			s.f.CloSlot = cloSlot
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, cloSlot, s.mem(), false)
			addr := s.addr(cloSlot)
			s.store(s.f.Config.Types.BytePtr, addr, clo)
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, cloSlot, s.mem(), false)
		}
		csiter := typecheck.NewClosureStructIter(fn.ClosureVars)
		for {
			n, typ, offset := csiter.Next()
			if n == nil {
				break
			}

			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)

			// SSA-able by-value captures become locals initialized from
			// the closure; everything else gets its (heap) address.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			if !n.Byval() {
				// By-reference capture: the closure field holds a pointer
				// to the variable; load it.
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// Fall off the end of the function? Make a return statement.
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main compilation loop.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information (register, frame offset,
	// type) for parameters passed in registers.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}
638
// storeParameterRegsToStack spills the register pieces of the incoming
// parameter n (described by paramAssignment) to its stack slot at addr.
// If pointersOnly is true, only pointer-shaped pieces are stored
// (NOTE(review): presumably for GC stack-map purposes — confirm at callers).
func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
	typs, offs := paramAssignment.RegisterTypesAndOffsets()
	for i, t := range typs {
		if pointersOnly && !t.IsPtrShaped() {
			continue
		}
		r := paramAssignment.Registers[i]
		o := offs[i]
		op, reg := ssa.ArgOpAndRegisterFor(r, abi)
		// Tag the value with the parameter name and piece offset for
		// debugging/liveness.
		aux := &ssa.AuxNameOffset{Name: n, Offset: o}
		v := s.newValue0I(op, t, reg)
		v.Aux = aux
		p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
		s.store(t, p, v)
	}
}
655
656
657
658
659
660
661
// zeroResults zeroes the stack-resident return values at the start of the
// function. Heap-escaping results are skipped; their storage is initialized
// separately (see paramsToHeap / newObject).
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// The local which points to the return value is the boring
			// thing on the stack; the actual result lives on the heap.
			continue
		}

		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			// Mark the slot defined before zeroing it, when liveness
			// cares about it (pointers, or merge candidates).
			if typ.HasPointers() || ssa.IsMergeCandidate(n) {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}
682
683
684
// paramsToHeap allocates heap storage for heap-escaping parameters and
// results (receiver, params, results). Incoming parameter values are then
// copied from their stack slots into the new heap storage.
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				continue // anonymous or blank parameter
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				// Only incoming parameters carry a value that must be
				// moved; results start zeroed.
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}
707
708
709
710
711 func allocSizeAndAlign(t *types.Type) (int64, int64) {
712 size, align := t.Size(), t.Alignment()
713 if types.PtrSize == 4 && align == 4 && size >= 8 {
714
715 size = types.RoundUp(size, 8)
716 align = 8
717 }
718 return size, align
719 }
720 func allocSize(t *types.Type) int64 {
721 size, _ := allocSizeAndAlign(t)
722 return size
723 }
724 func allocAlign(t *types.Type) int64 {
725 _, align := allocSizeAndAlign(t)
726 return align
727 }
728
729
// newHeapaddr allocates heap storage for n and sets n.Heapaddr to point to
// it. Small pointer-free allocations are aggregated: several variables share
// one allocation call (rewritten later by flushPendingHeapAllocations) as
// long as their combined size stays within maxAggregatedHeapAllocation.
func (s *state) newHeapaddr(n *ir.Name) {
	size := allocSize(n.Type())
	// Pointerful, large, or zero-sized allocations are never aggregated.
	if n.Type().HasPointers() || size >= maxAggregatedHeapAllocation || size == 0 {
		s.setHeapaddr(n.Pos(), n, s.newObject(n.Type(), nil))
		return
	}

	// If this allocation would push the pending batch over the budget,
	// flush the batch first.
	var used int64
	for _, v := range s.pendingHeapAllocations {
		used += allocSize(v.Type.Elem())
	}
	if used+size > maxAggregatedHeapAllocation {
		s.flushPendingHeapAllocations()
	}

	var allocCall *ssa.Value
	if len(s.pendingHeapAllocations) == 0 {
		// Starting a new batch: make the allocation call now; its size
		// and type are patched in flushPendingHeapAllocations if more
		// variables join the batch.
		allocCall = s.newObject(n.Type(), nil)
	} else {
		// Join the existing batch's allocation call (every pending value
		// is an OpOffPtr whose arg 0 is the call's result).
		allocCall = s.pendingHeapAllocations[0].Args[0]
	}

	// Offset 0 is a placeholder; the real offset is assigned when the
	// batch is flushed.
	v := s.newValue1I(ssa.OpOffPtr, n.Type().PtrTo(), 0, allocCall)

	s.pendingHeapAllocations = append(s.pendingHeapAllocations, v)

	s.setHeapaddr(n.Pos(), n, v)
}
765
// flushPendingHeapAllocations finalizes the current batch of aggregated heap
// allocations (see newHeapaddr). With a single pending variable the
// placeholder OpOffPtr is turned into a plain copy of the allocation result.
// With several, the shared allocation call is rewritten in place into a raw
// mallocgc call sized for the whole batch, and each variable's OpOffPtr gets
// its final offset.
func (s *state) flushPendingHeapAllocations() {
	pending := s.pendingHeapAllocations
	if len(pending) == 0 {
		return
	}
	s.pendingHeapAllocations = nil // reset to avoid reentrance/reuse
	ptr := pending[0].Args[0]      // The (possibly updated) allocation result.
	call := ptr.Args[0]            // The allocation call itself.

	if len(pending) == 1 {
		// Just a single object: the allocation already has the right
		// type; replace the offset-0 OpOffPtr with a copy.
		v := pending[0]
		v.Op = ssa.OpCopy
		return
	}

	// Sort by decreasing alignment so each object's offset is naturally
	// aligned and the first object's alignment covers the whole block.
	slices.SortStableFunc(pending, func(x, y *ssa.Value) int {
		return cmp.Compare(allocAlign(y.Type.Elem()), allocAlign(x.Type.Elem()))
	})

	// Assign each variable its offset within the combined allocation.
	var size int64
	for _, v := range pending {
		v.AuxInt = size
		size += allocSize(v.Type.Elem())
	}
	align := allocAlign(pending[0].Type.Elem())
	size = types.RoundUp(size, align)

	// Rewrite the allocation call in place into
	// mallocgc(size, nil, true) — a typeless, pointer-free allocation.
	// call.Args[1] is the incoming memory state of the original call.
	args := []*ssa.Value{
		s.constInt(types.Types[types.TUINTPTR], size),
		s.constNil(call.Args[0].Type),
		s.constBool(true),
		call.Args[1],
	}
	call.Aux = ssa.StaticAuxCall(ir.Syms.MallocGC, s.f.ABIDefault.ABIAnalyzeTypes(
		[]*types.Type{args[0].Type, args[1].Type, args[2].Type},
		[]*types.Type{types.Types[types.TUNSAFEPTR]},
	))
	call.AuxInt = 4 * s.config.PtrSize // arg+results size: 3 words in, 1 word out
	call.SetArgs4(args[0], args[1], args[2], args[3])

	// The combined allocation no longer has a single element type.
	call.Type = types.NewTuple(types.Types[types.TUNSAFEPTR], types.TypeMem)
	ptr.Type = types.Types[types.TUNSAFEPTR]
}
816
817
818
// setHeapaddr allocates a new PAUTO variable to store ptr (which must be
// non-nil and point to heap storage for n, i.e. of type *T where n has type
// T) and sets it as n.Heapaddr.
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare a variable "&n" to hold the address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}
837
838
// newObject returns an SSA value for a pointer to a new heap object of the
// given type, produced by a call to runtime.newobject. Zero-sized types
// share the runtime's zerobase instead of allocating. rtype, if non-nil, is
// the value to pass as the type argument; otherwise the type's descriptor
// address is used.
func (s *state) newObject(typ *types.Type, rtype *ssa.Value) *ssa.Value {
	if typ.Size() == 0 {
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	if rtype == nil {
		rtype = s.reflectType(typ)
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}
848
// checkPtrAlignment emits a call to runtime.checkptrAlignment for the
// unsafe-pointer conversion n, checking value v. count is the element count
// for slice conversions (unsafe.Slice-style, where n's element type is an
// array), or nil for a plain pointer conversion.
func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		// Slice form: the pointer's element type is an array; the check
		// applies per array element.
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()

	// Alignment-1 elements can never be misaligned, and without a count
	// there is no overflow to check either — skip the runtime call.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}
879
880
881
// reflectType returns an SSA value (placed in the entry block) holding the
// address of typ's runtime type descriptor symbol.
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	// TODO(review): callers could pass in the descriptor from the
	// typechecker's RType fields where available, instead of re-deriving it.
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}
888
// dumpSourcesColumn writes the source-code column of the SSA HTML dump:
// the target function's own lines first, followed by the recorded inlined
// functions (ssaDumpInlined) in topological order.
func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
	// Read the target function's sources; on failure we still continue so
	// the inlined functions can be shown.
	fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
	targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
	if err != nil {
		writer.Logf("cannot read sources for function %v: %v", fn, err)
	}

	// Read the sources of each inlined function.
	var inlFns []*ssa.FuncLines
	for _, fi := range ssaDumpInlined {
		elno := fi.Endlineno
		fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
		fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
		if err != nil {
			writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
			continue
		}
		inlFns = append(inlFns, fnLines)
	}

	slices.SortFunc(inlFns, ssa.ByTopoCmp)
	if targetFn != nil {
		inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
	}

	writer.WriteSources("sources", inlFns)
}
917
918 func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
919 f, err := os.Open(os.ExpandEnv(file))
920 if err != nil {
921 return nil, err
922 }
923 defer f.Close()
924 var lines []string
925 ln := uint(1)
926 scanner := bufio.NewScanner(f)
927 for scanner.Scan() && ln <= end {
928 if ln >= start {
929 lines = append(lines, scanner.Text())
930 }
931 ln++
932 }
933 return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
934 }
935
936
937
938
// updateUnsetPredPos propagates position information from b (which must have
// a position) backwards to predecessors that lack one. A predecessor with no
// position of its own receives b's position — or, preferably, the position
// of b's first positioned, non-LackingPos value — and is then processed
// recursively so whole position-less chains get filled in.
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			// Predecessor already has meaningful position info; leave it.
			continue
		}
		if bestPos == src.NoXPos {
			// Compute the position to propagate lazily, once per call.
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Take the earliest value position in b; it is closer
					// to the predecessor's control flow than b.Pos.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		s.updateUnsetPredPos(p) // We do not expect long chains of these, thus recursion is okay.
	}
}
967
968
// openDeferInfo records information about a single open-coded defer, used
// when generating the deferred-call code at function exits and in
// emitOpenDeferInfo.
type openDeferInfo struct {
	// The deferred call expression.
	n *ir.CallExpr

	// The closure (function value) being deferred.
	closure *ssa.Value

	// The stack slot holding the closure for this defer; its frame offset
	// is validated and encoded by emitOpenDeferInfo.
	closureNode *ir.Name
}
980
// state holds all the working state while converting a single function's IR
// to SSA (see buildssa).
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement targets
	breakTo    *ssa.Block
	continueTo *ssa.Block

	// current location where we're interpreting the AST; nil between blocks
	curBlock *ssa.Block

	// variable assignments in the current block; reset by startBlock and
	// saved into defvars by endBlock
	vars map[ir.Node]*ssa.Value

	// forward references to variables not yet defined in the current
	// block (NOTE(review): consumed by insertPhis, outside this view —
	// confirm)
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block, indexed by block ID
	// (filled in by endBlock)
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables, computed in the entry block
	decladdrs map[*ir.Name]*ssa.Value

	// starting values; memory, stack pointer, and globals base pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value

	// address and temp holding the deferBits byte for open-coded defers
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// stack of source positions; pushLine/popLine maintain it, peekPos
	// reads the top
	line []src.XPos

	// the last line number processed; it may have been popped
	lastPos src.XPos

	// per-(function,position) shared panic blocks
	// NOTE(review): presumably to deduplicate bounds/nil-check panic
	// targets — confirm where panics is written.
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit
	instrumentMemory    bool // whether to instrument memory operations

	// information about each open-coded defer, in defer order
	openDefers []*openDeferInfo

	// cached state for the shared deferreturn exit code
	// (NOTE(review): set/used outside this view — confirm semantics)
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int

	prevCall *ssa.Value // the most recently emitted call value (NOTE(review): confirm — set outside this view)

	// OpOffPtr placeholders for small heap allocations that may still be
	// merged into one mallocgc call (see newHeapaddr /
	// flushPendingHeapAllocations; flushed at block end)
	pendingHeapAllocations []*ssa.Value

	// nodes already handled as append targets
	// (NOTE(review): used outside this view — confirm)
	appendTargets map[ir.Node]bool
}
1064
// funcLine keys the shared panic-block map (state.panics): a panic function
// plus the source position (base and line) where it is called.
type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}
1070
// ssaLabel records the SSA blocks associated with a Go label: the block the
// label itself jumps to, and the targets for break/continue statements that
// reference the label.
type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}
1076
1077
1078 func (s *state) label(sym *types.Sym) *ssaLabel {
1079 lab := s.labels[sym.Name]
1080 if lab == nil {
1081 lab = new(ssaLabel)
1082 s.labels[sym.Name] = lab
1083 }
1084 return lab
1085 }
1086
// Logging and diagnostic helpers, delegating to the underlying *ssa.Func
// and its frontend.
func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                            { return s.f.Log() }

// Fatalf reports a compiler bug at the current (innermost pushed) source
// position.
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                                { return s.f.Frontend().Debug_checknil() }
1094
// ssaMarker returns a fresh ir.Name used purely as a map key for a
// pseudo-variable (it has no type and belongs to no package).
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}
1098
var (
	// marker node for the memory variable (tracked in s.vars)
	memVar = ssaMarker("mem")

	// marker nodes for temporary pseudo-variables used while building SSA
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)
1112
1113
1114 func (s *state) startBlock(b *ssa.Block) {
1115 if s.curBlock != nil {
1116 s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
1117 }
1118 s.curBlock = b
1119 s.vars = map[ir.Node]*ssa.Value{}
1120 clear(s.fwdVars)
1121 }
1122
1123
1124
1125
1126 func (s *state) endBlock() *ssa.Block {
1127 b := s.curBlock
1128 if b == nil {
1129 return nil
1130 }
1131
1132 s.flushPendingHeapAllocations()
1133
1134 for len(s.defvars) <= int(b.ID) {
1135 s.defvars = append(s.defvars, nil)
1136 }
1137 s.defvars[b.ID] = s.vars
1138 s.curBlock = nil
1139 s.vars = nil
1140 if b.LackingPos() {
1141
1142
1143
1144 b.Pos = src.NoXPos
1145 } else {
1146 b.Pos = s.lastPos
1147 }
1148 return b
1149 }
1150
1151
1152 func (s *state) pushLine(line src.XPos) {
1153 if !line.IsKnown() {
1154
1155
1156 line = s.peekPos()
1157 if base.Flag.K != 0 {
1158 base.Warn("buildssa: unknown position (line 0)")
1159 }
1160 } else {
1161 s.lastPos = line
1162 }
1163
1164 s.line = append(s.line, line)
1165 }
1166
1167
// popLine removes the top of the source position stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos returns the current source position (top of the stack).
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}
1176
1177
// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to
// the current block. isStmt determines whether the value may serve as a
// statement boundary; if not, its position is marked NotStmt.
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to
// the current block. isStmt determines whether the value may serve as a
// statement boundary; if not, its position is marked NotStmt.
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value
// to the current block. isStmt determines whether the value may serve as
// a statement boundary; if not, its position is marked NotStmt.
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}
1276
// entryBlock returns the block that function-entry values (see the
// entryNewValue* helpers) should be added to. Normally that is the
// function's entry block, but with optimization disabled (-N) and a
// block in progress, the current block is used instead.
// NOTE(review): presumably this keeps values near their uses for
// debugging when not optimizing — confirm against callers.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		b = s.curBlock
	}
	return b
}
1288
1289
// entryNewValue0 adds a new value with no arguments to the entry block.
// All entryNewValue* helpers use src.NoXPos so the value is not
// attributed to any source statement.
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}
1323
1324
// The const* helpers return constant SSA values of the given type,
// forwarding to the (cached) constant constructors on ssa.Func.

// constSlice returns the constant zero value of slice type t.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}

// constInterface returns the constant zero value of interface type t.
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}

// constNil returns a constant nil of pointer type t.
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }

// constEmptyString returns the constant empty string of type t.
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}

// constBool returns the boolean constant c.
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}

// constInt8 returns the 8-bit integer constant c of type t.
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}

// constInt16 returns the 16-bit integer constant c of type t.
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}

// constInt32 returns the 32-bit integer constant c of type t.
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}

// constInt64 returns the 64-bit integer constant c of type t.
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}

// constFloat32 returns the float32 constant c of type t.
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}

// constFloat64 returns the float64 constant c of type t.
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}

// constInt returns the integer constant c of pointer-width type t,
// using a 64-bit constant on 64-bit targets and a range-checked 32-bit
// constant otherwise.
func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}

// constOffPtrSP returns a constant offset-from-SP pointer of type t.
func (s *state) constOffPtrSP(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstOffPtrSP(t, c, s.sp)
}
1368
1369
1370
// newValueOrSfCall1 is like newValue1, except that when the function is
// compiled in soft-float mode and sfcall can lower op to a runtime call,
// the call's result is returned instead.
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}

// newValueOrSfCall2 is the two-argument analogue of newValueOrSfCall1.
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}
1387
// instrumentKind describes the kind of memory operation being
// instrumented for the sanitizers (race, msan, asan).
type instrumentKind uint8

const (
	// Typed as instrumentKind (previously untyped ints) so that
	// accidental mixing with unrelated integer values is a compile
	// error; numeric values are unchanged.
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove // msan-only: a read of src plus a write of dst in one call
)
1395
// instrument emits a sanitizer instrumentation call for an access of
// the given kind covering type t at addr.
func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}
1399
1400
1401
1402
1403 func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
1404 if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
1405 s.instrument(t, addr, kind)
1406 return
1407 }
1408 for _, f := range t.Fields() {
1409 if f.Sym.IsBlank() {
1410 continue
1411 }
1412 offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
1413 s.instrumentFields(f.Type, offptr, kind)
1414 }
1415 }
1416
1417 func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
1418 if base.Flag.MSan {
1419 s.instrument2(t, dst, src, instrumentMove)
1420 } else {
1421 s.instrument(t, src, instrumentRead)
1422 s.instrument(t, dst, instrumentWrite)
1423 }
1424 }
1425
// instrument2 emits a sanitizer runtime call recording an access of
// kind covering t.Size() bytes at addr (and addr2, for msan moves).
// It is a no-op when instrumentation is disabled for this function,
// when the type has zero size, or when the address is one the
// sanitizers treat as safe.
func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // nothing to report for zero-sized accesses
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false

	// Only msan moves carry a second address.
	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	// Pick the runtime hook. The sanitizers are checked in a fixed
	// priority order; exactly one must be active here.
	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// For multi-component types use the race range hooks,
		// which take an explicit width.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// Single-component object: the plain race hooks, which do
		// not take a width argument.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	// Assemble the argument list: address(es), then width if the
	// chosen hook wants one.
	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}
1506
// load loads the value of type t at address src, with per-field
// sanitizer instrumentation where applicable.
func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

// rawLoad is like load, but without sanitizer instrumentation.
func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

// store stores val of type t to address dst, threading the new memory
// state through s.vars[memVar]. Note: no instrumentation is done here.
func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}
1519
1520 func (s *state) zero(t *types.Type, dst *ssa.Value) {
1521 s.instrument(t, dst, instrumentWrite)
1522 store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
1523 store.Aux = t
1524 s.vars[memVar] = store
1525 }
1526
// move generates code to copy a value of type t from src to dst.
// The source and destination must not overlap.
func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}

// moveWhichMayOverlap is like move, but when mayOverlap is true the
// source and destination may partially overlap. Overlapping copies of
// multi-element arrays that are too large to be an inlinable memmove
// are routed through a runtime memmove call, which is specified to
// handle overlapping arguments; everything else is emitted as an
// ordinary OpMove.
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Possibly-overlapping copy: fall back to the runtime.
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// typedmemmove may emit pointer writes; record the
			// position for write-barrier diagnostics.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}
1573
1574
1575 func (s *state) stmtList(l ir.Nodes) {
1576 for _, n := range l {
1577 s.stmt(n)
1578 }
1579 }
1580
1581
// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If the current block is dead, this statement is unreachable.
	// Only a label can revive control flow.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL:
		// No-op here; fallthrough is handled when lowering switches.

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr" ||
						fn == "panicrangestate") {
				// These runtime calls never return; end the block
				// as an exit so no successor code is generated.
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			// Report which defer implementation was chosen.
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			// Non-SSAable result: strip the load and assign by
			// dereference instead, after verifying the load is
			// against the current memory state.
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// Only intrinsic calls returning two values reach here
		// (anything else would have been expanded earlier).
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Blank label: nothing can jump here; nothing to do.
			break
		}
		lab := s.label(sym)

		// The label may already have a target block from a forward goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Fall through into the label's block (unless the
		// predecessor is already unreachable).
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the jump is a statement boundary
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// Self-assignment of a variable; nothing to do.
			return
		}

		// mayOverlap tracks whether the LHS and RHS might refer to
		// partially overlapping memory. Only a dereference on both
		// sides can produce that situation here; dereferences of
		// string pointers are exempt since string memory is
		// immutable and thus cannot alias the destination write.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				mayOverlap = false
			}
		}

		// Evaluate the RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// Only all-zero composite literals may remain at
				// this stage; treat them as the zero value.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Special-case x = append(x, ...) so the
				// in-place update path can be used.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// SSA-able slices are handled by the normal
				// assignment path below.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 {
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs: evaluate the RHS for side effects only.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		// For non-SSAable types, compute the address of the RHS and
		// assign by dereference; r == nil signals a zeroing store.
		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its
			// source (x = x[i:j:k]): skip writing back the fields
			// the slicing does not change.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// x[0:...] is the same as x[:...].
				i = nil
			}
			// With no low bound the pointer is unchanged; with no
			// high/max bound either, the len/cap are unchanged too.
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			// Constant condition: emit only the taken branch.
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		// Empty branches go directly to the join block.
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// Plain break/continue: use the innermost targets.
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// Labeled break/continue: look up the label's targets.
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // the jump is a statement boundary
		b.AddEdgeTo(to)

	case ir.OFOR:
		// Lower a for loop as: goto cond; cond -> body|end;
		// body -> incr; incr -> cond.
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // loops with per-iteration vars must be rewritten before this point
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// Give the body block the loop's position so even empty
		// loops are attributed to the loop statement.
		bBody.Pos = n.Pos()

		// First, jump to the condition test.
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// Generate code to test the condition.
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// Set up break/continue targets for the body.
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// Labeled for loop.
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// Generate the body.
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// Tear down break/continue targets.
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// Body done; fall through to the increment.
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// Generate the increment and loop back to the condition.
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// If the increment block ended without a position,
			// borrow the condition's.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been rewritten into their Compiled bodies by the
		// front end; the main remaining task is hooking up breaks.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// Labeled switch/select.
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// Generate the compiled body.
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// The compiled body is expected to end every reachable path
		// itself; any block still open here is treated as unreachable.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make the blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// Evaluate the index being switched on.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Do all the arithmetic in uintptr width.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// Gate the jump table on a range check: min <= idx <= max,
		// implemented as the single unsigned comparison
		// idx-min <= max-min. Cases are sorted, so the first and
		// last give the bounds.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}

		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)   // in range: use the jump table
		b.AddEdgeTo(bEnd) // out of range: no case matches
		b.Likely = ssa.BranchLikely

		// Build the jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Fill the table: default everything to bEnd, then point
		// each case value at its label's block.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// When supported, probe the interface-switch cache before
		// calling into the runtime.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
			// The cache probe needs an atomic pointer load;
			// double-check the intrinsic is available.
			if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick pointer-width arithmetic ops.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Atomically load the cache pointer from the descriptor.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize the probe hash (zero-extended to uintptr).
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// The first word of the cache is the index mask.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())

			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// loopHead: compute the address of entry
			// (hash & mask), where each entry is 3 words, and
			// bump the hash for the next probe.
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)

			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Entry word 0 holds the type; equal to t means a hit.
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// A nil type marks an empty slot: the probe missed.
			// Otherwise keep probing.
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// Hit: entry word 1 is the case index, word 2 the itab
			// (see the OffPtr offsets below).
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// Miss: fall through to the runtime call.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Cache hits merge back in here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		// Only the check's side effect matters; the result value is
		// deliberately discarded.
		_ = s.nilCheck(p)

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
2276
2277
2278
// shareDeferExits controls whether exit paths with open-coded defers may
// share a single generated defer-exit sequence (see state.exit).
// Currently disabled.
const shareDeferExits = false
2280
2281
2282
2283
// exit processes any code that needs to be generated just before a
// return: running deferred calls and materializing result values into a
// MakeResult. It ends the current block with kind BlockRet and returns
// that block.
func (s *state) exit() *ssa.Block {
	if s.hasdefer {
		if s.hasOpenDefers {
			if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
				// Reuse the previously generated open-coded
				// defer exit (only when shareDeferExits is on).
				if s.curBlock.Kind != ssa.BlockPlain {
					panic("Block for an exit should be BlockPlain")
				}
				s.curBlock.AddEdgeTo(s.lastDeferExit)
				s.endBlock()
				return s.lastDeferFinalBlock
			}
			s.openDeferExit()
		} else {
			// Non-open-coded defers: call deferreturn, attributed
			// to the function's closing line.
			s.pushLine(s.curfn.Endlineno)
			s.rtcall(ir.Syms.Deferreturn, true, nil)
			s.popLine()
		}
	}

	// Materialize each result value; the final slot is the memory state.
	resultFields := s.curfn.Type().Results()
	results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
	for i, f := range resultFields {
		n := f.Nname.(*ir.Name)
		if s.canSSA(n) {
			if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
				// Mark the result slot as defined before use.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			results[i] = s.variable(n, n.Type())
		} else if !n.OnStack() {
			// Heap-allocated result: dereference its heap address.
			if n.Type().HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			ha := s.expr(n.Heapaddr)
			s.instrumentFields(n.Type(), ha, instrumentRead)
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
		} else {
			// Stack-resident, non-SSAable result: dereference its
			// stack slot.
			results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
		}
	}

	// Emitted after the result loads above, so the instrumented reads
	// occur before the function-exit marker.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncexit, true, nil)
	}

	results[len(results)-1] = s.mem()
	m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
	m.AddArgs(results...)

	b := s.endBlock()
	b.Kind = ssa.BlockRet
	b.SetControl(m)
	if s.hasdefer && s.hasOpenDefers {
		// Remember the final block so shareDeferExits could reuse it.
		s.lastDeferFinalBlock = b
	}
	return b
}
2359
// opAndType is the key of the opToSSA table: a generic IR operation
// together with an operand kind, mapping to the width- and
// signedness-specific SSA op.
type opAndType struct {
	op    ir.Op
	etype types.Kind
}
2364
// opToSSA maps a generic IR operator plus the concrete kind of its
// operand type to the width- and sign-specific SSA opcode that
// implements it. Lookups go through (*state).ssaOp, which reports a
// fatal error on a missing entry, so only combinations the front end
// can actually produce are listed here.
var opToSSA = map[opAndType]ssa.Op{
	{ir.OADD, types.TINT8}:    ssa.OpAdd8,
	{ir.OADD, types.TUINT8}:   ssa.OpAdd8,
	{ir.OADD, types.TINT16}:   ssa.OpAdd16,
	{ir.OADD, types.TUINT16}:  ssa.OpAdd16,
	{ir.OADD, types.TINT32}:   ssa.OpAdd32,
	{ir.OADD, types.TUINT32}:  ssa.OpAdd32,
	{ir.OADD, types.TINT64}:   ssa.OpAdd64,
	{ir.OADD, types.TUINT64}:  ssa.OpAdd64,
	{ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
	{ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,

	{ir.OSUB, types.TINT8}:    ssa.OpSub8,
	{ir.OSUB, types.TUINT8}:   ssa.OpSub8,
	{ir.OSUB, types.TINT16}:   ssa.OpSub16,
	{ir.OSUB, types.TUINT16}:  ssa.OpSub16,
	{ir.OSUB, types.TINT32}:   ssa.OpSub32,
	{ir.OSUB, types.TUINT32}:  ssa.OpSub32,
	{ir.OSUB, types.TINT64}:   ssa.OpSub64,
	{ir.OSUB, types.TUINT64}:  ssa.OpSub64,
	{ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
	{ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,

	{ir.ONOT, types.TBOOL}: ssa.OpNot,

	{ir.ONEG, types.TINT8}:    ssa.OpNeg8,
	{ir.ONEG, types.TUINT8}:   ssa.OpNeg8,
	{ir.ONEG, types.TINT16}:   ssa.OpNeg16,
	{ir.ONEG, types.TUINT16}:  ssa.OpNeg16,
	{ir.ONEG, types.TINT32}:   ssa.OpNeg32,
	{ir.ONEG, types.TUINT32}:  ssa.OpNeg32,
	{ir.ONEG, types.TINT64}:   ssa.OpNeg64,
	{ir.ONEG, types.TUINT64}:  ssa.OpNeg64,
	{ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
	{ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,

	{ir.OBITNOT, types.TINT8}:   ssa.OpCom8,
	{ir.OBITNOT, types.TUINT8}:  ssa.OpCom8,
	{ir.OBITNOT, types.TINT16}:  ssa.OpCom16,
	{ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
	{ir.OBITNOT, types.TINT32}:  ssa.OpCom32,
	{ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
	{ir.OBITNOT, types.TINT64}:  ssa.OpCom64,
	{ir.OBITNOT, types.TUINT64}: ssa.OpCom64,

	{ir.OIMAG, types.TCOMPLEX64}:  ssa.OpComplexImag,
	{ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
	{ir.OREAL, types.TCOMPLEX64}:  ssa.OpComplexReal,
	{ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,

	{ir.OMUL, types.TINT8}:    ssa.OpMul8,
	{ir.OMUL, types.TUINT8}:   ssa.OpMul8,
	{ir.OMUL, types.TINT16}:   ssa.OpMul16,
	{ir.OMUL, types.TUINT16}:  ssa.OpMul16,
	{ir.OMUL, types.TINT32}:   ssa.OpMul32,
	{ir.OMUL, types.TUINT32}:  ssa.OpMul32,
	{ir.OMUL, types.TINT64}:   ssa.OpMul64,
	{ir.OMUL, types.TUINT64}:  ssa.OpMul64,
	{ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
	{ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,

	{ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
	{ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,

	// Integer division and modulus need distinct signed/unsigned opcodes.
	{ir.ODIV, types.TINT8}:   ssa.OpDiv8,
	{ir.ODIV, types.TUINT8}:  ssa.OpDiv8u,
	{ir.ODIV, types.TINT16}:  ssa.OpDiv16,
	{ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
	{ir.ODIV, types.TINT32}:  ssa.OpDiv32,
	{ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
	{ir.ODIV, types.TINT64}:  ssa.OpDiv64,
	{ir.ODIV, types.TUINT64}: ssa.OpDiv64u,

	{ir.OMOD, types.TINT8}:   ssa.OpMod8,
	{ir.OMOD, types.TUINT8}:  ssa.OpMod8u,
	{ir.OMOD, types.TINT16}:  ssa.OpMod16,
	{ir.OMOD, types.TUINT16}: ssa.OpMod16u,
	{ir.OMOD, types.TINT32}:  ssa.OpMod32,
	{ir.OMOD, types.TUINT32}: ssa.OpMod32u,
	{ir.OMOD, types.TINT64}:  ssa.OpMod64,
	{ir.OMOD, types.TUINT64}: ssa.OpMod64u,

	{ir.OAND, types.TINT8}:   ssa.OpAnd8,
	{ir.OAND, types.TUINT8}:  ssa.OpAnd8,
	{ir.OAND, types.TINT16}:  ssa.OpAnd16,
	{ir.OAND, types.TUINT16}: ssa.OpAnd16,
	{ir.OAND, types.TINT32}:  ssa.OpAnd32,
	{ir.OAND, types.TUINT32}: ssa.OpAnd32,
	{ir.OAND, types.TINT64}:  ssa.OpAnd64,
	{ir.OAND, types.TUINT64}: ssa.OpAnd64,

	{ir.OOR, types.TINT8}:   ssa.OpOr8,
	{ir.OOR, types.TUINT8}:  ssa.OpOr8,
	{ir.OOR, types.TINT16}:  ssa.OpOr16,
	{ir.OOR, types.TUINT16}: ssa.OpOr16,
	{ir.OOR, types.TINT32}:  ssa.OpOr32,
	{ir.OOR, types.TUINT32}: ssa.OpOr32,
	{ir.OOR, types.TINT64}:  ssa.OpOr64,
	{ir.OOR, types.TUINT64}: ssa.OpOr64,

	{ir.OXOR, types.TINT8}:   ssa.OpXor8,
	{ir.OXOR, types.TUINT8}:  ssa.OpXor8,
	{ir.OXOR, types.TINT16}:  ssa.OpXor16,
	{ir.OXOR, types.TUINT16}: ssa.OpXor16,
	{ir.OXOR, types.TINT32}:  ssa.OpXor32,
	{ir.OXOR, types.TUINT32}: ssa.OpXor32,
	{ir.OXOR, types.TINT64}:  ssa.OpXor64,
	{ir.OXOR, types.TUINT64}: ssa.OpXor64,

	// Equality is also defined on interfaces, slices (against nil),
	// and pointer-shaped types (func, map, chan, pointers).
	{ir.OEQ, types.TBOOL}:      ssa.OpEqB,
	{ir.OEQ, types.TINT8}:      ssa.OpEq8,
	{ir.OEQ, types.TUINT8}:     ssa.OpEq8,
	{ir.OEQ, types.TINT16}:     ssa.OpEq16,
	{ir.OEQ, types.TUINT16}:    ssa.OpEq16,
	{ir.OEQ, types.TINT32}:     ssa.OpEq32,
	{ir.OEQ, types.TUINT32}:    ssa.OpEq32,
	{ir.OEQ, types.TINT64}:     ssa.OpEq64,
	{ir.OEQ, types.TUINT64}:    ssa.OpEq64,
	{ir.OEQ, types.TINTER}:     ssa.OpEqInter,
	{ir.OEQ, types.TSLICE}:     ssa.OpEqSlice,
	{ir.OEQ, types.TFUNC}:      ssa.OpEqPtr,
	{ir.OEQ, types.TMAP}:       ssa.OpEqPtr,
	{ir.OEQ, types.TCHAN}:      ssa.OpEqPtr,
	{ir.OEQ, types.TPTR}:       ssa.OpEqPtr,
	{ir.OEQ, types.TUINTPTR}:   ssa.OpEqPtr,
	{ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
	{ir.OEQ, types.TFLOAT64}:   ssa.OpEq64F,
	{ir.OEQ, types.TFLOAT32}:   ssa.OpEq32F,

	{ir.ONE, types.TBOOL}:      ssa.OpNeqB,
	{ir.ONE, types.TINT8}:      ssa.OpNeq8,
	{ir.ONE, types.TUINT8}:     ssa.OpNeq8,
	{ir.ONE, types.TINT16}:     ssa.OpNeq16,
	{ir.ONE, types.TUINT16}:    ssa.OpNeq16,
	{ir.ONE, types.TINT32}:     ssa.OpNeq32,
	{ir.ONE, types.TUINT32}:    ssa.OpNeq32,
	{ir.ONE, types.TINT64}:     ssa.OpNeq64,
	{ir.ONE, types.TUINT64}:    ssa.OpNeq64,
	{ir.ONE, types.TINTER}:     ssa.OpNeqInter,
	{ir.ONE, types.TSLICE}:     ssa.OpNeqSlice,
	{ir.ONE, types.TFUNC}:      ssa.OpNeqPtr,
	{ir.ONE, types.TMAP}:       ssa.OpNeqPtr,
	{ir.ONE, types.TCHAN}:      ssa.OpNeqPtr,
	{ir.ONE, types.TPTR}:       ssa.OpNeqPtr,
	{ir.ONE, types.TUINTPTR}:   ssa.OpNeqPtr,
	{ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
	{ir.ONE, types.TFLOAT64}:   ssa.OpNeq64F,
	{ir.ONE, types.TFLOAT32}:   ssa.OpNeq32F,

	// Orderings: unsigned types use the U-suffixed comparison ops.
	// Only OLT and OLE appear; OGT/OGE are canonicalized to these by
	// swapping operands in expr.
	{ir.OLT, types.TINT8}:    ssa.OpLess8,
	{ir.OLT, types.TUINT8}:   ssa.OpLess8U,
	{ir.OLT, types.TINT16}:   ssa.OpLess16,
	{ir.OLT, types.TUINT16}:  ssa.OpLess16U,
	{ir.OLT, types.TINT32}:   ssa.OpLess32,
	{ir.OLT, types.TUINT32}:  ssa.OpLess32U,
	{ir.OLT, types.TINT64}:   ssa.OpLess64,
	{ir.OLT, types.TUINT64}:  ssa.OpLess64U,
	{ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
	{ir.OLT, types.TFLOAT32}: ssa.OpLess32F,

	{ir.OLE, types.TINT8}:    ssa.OpLeq8,
	{ir.OLE, types.TUINT8}:   ssa.OpLeq8U,
	{ir.OLE, types.TINT16}:   ssa.OpLeq16,
	{ir.OLE, types.TUINT16}:  ssa.OpLeq16U,
	{ir.OLE, types.TINT32}:   ssa.OpLeq32,
	{ir.OLE, types.TUINT32}:  ssa.OpLeq32U,
	{ir.OLE, types.TINT64}:   ssa.OpLeq64,
	{ir.OLE, types.TUINT64}:  ssa.OpLeq64U,
	{ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
	{ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
}
2536
2537 func (s *state) concreteEtype(t *types.Type) types.Kind {
2538 e := t.Kind()
2539 switch e {
2540 default:
2541 return e
2542 case types.TINT:
2543 if s.config.PtrSize == 8 {
2544 return types.TINT64
2545 }
2546 return types.TINT32
2547 case types.TUINT:
2548 if s.config.PtrSize == 8 {
2549 return types.TUINT64
2550 }
2551 return types.TUINT32
2552 case types.TUINTPTR:
2553 if s.config.PtrSize == 8 {
2554 return types.TUINT64
2555 }
2556 return types.TUINT32
2557 }
2558 }
2559
2560 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2561 etype := s.concreteEtype(t)
2562 x, ok := opToSSA[opAndType{op, etype}]
2563 if !ok {
2564 s.Fatalf("unhandled binary op %v %s", op, etype)
2565 }
2566 return x
2567 }
2568
// opAndTwoTypes is the key for shiftOpToSSA: an IR shift operator,
// the kind of the shifted operand (etype1), and the kind of the shift
// amount (etype2).
type opAndTwoTypes struct {
	op     ir.Op
	etype1 types.Kind
	etype2 types.Kind
}
2574
// twoTypes is the key for the floating-point conversion tables:
// the source kind (etype1) and destination kind (etype2).
type twoTypes struct {
	etype1 types.Kind
	etype2 types.Kind
}
2579
// twoOpsAndType describes how to perform a conversion as two chained
// SSA ops: op1 takes the source to intermediateType, then op2 takes
// the intermediate value to the destination. Either op may be
// ssa.OpCopy (skip that step) or ssa.OpInvalid (this two-op scheme
// does not apply; see conv's fallback paths).
type twoOpsAndType struct {
	op1              ssa.Op
	op2              ssa.Op
	intermediateType types.Kind
}
2585
// fpConvOpToSSA maps a (from, to) kind pair for conversions involving
// floating point to the op pair implementing it. Small integers are
// first widened to 32 (or 64) bits, then converted; the reverse
// direction converts to a wide integer and truncates. Entries with
// ssa.OpInvalid mark the 64-bit-unsigned cases that cannot be done
// with a simple two-op sequence on all targets; conv detects the
// sentinel and falls back to helper-based code (uint64Tofloat*,
// float*ToUint64), unless an arch-specific table below overrides the
// entry first.
var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	// signed int -> float32
	{types.TINT8, types.TFLOAT32}:  {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
	{types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},

	// signed int -> float64
	{types.TINT8, types.TFLOAT64}:  {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
	{types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},

	// float32 -> signed int
	{types.TFLOAT32, types.TINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},

	// float64 -> signed int
	{types.TFLOAT64, types.TINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
	{types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},

	// unsigned int -> float32 (uint32 widens to int64 so the signed
	// 64-bit convert is exact; uint64 needs the OpInvalid fallback)
	{types.TUINT8, types.TFLOAT32}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
	{types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// unsigned int -> float64
	{types.TUINT8, types.TFLOAT64}:  {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64},

	// float32 -> unsigned int
	{types.TFLOAT32, types.TUINT8}:  {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float64 -> unsigned int
	{types.TFLOAT64, types.TUINT8}:  {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
	{types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64},

	// float <-> float. NOTE(review): same-size conversions use Round
	// ops rather than Copy, presumably to discard any extra internal
	// precision — confirm against the SSA op definitions.
	{types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
	{types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
	{types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
}
2634
2635
2636
// fpConvOpToSSA32 overrides fpConvOpToSSA for uint32<->float
// conversions when the target register size is 4 bytes (and the
// target is not MIPS and softfloat is off — see conv): such targets
// have dedicated unsigned-32 conversion ops, avoiding the widen-to-64
// sequence in the base table.
var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
	{types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
	{types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
	{types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
	{types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
}
2643
2644
// uint64fpConvOpToSSA overrides the OpInvalid uint64<->float entries
// of fpConvOpToSSA on targets with direct unsigned-64 conversion ops
// (ARM64, Wasm, S390X) and under softfloat — see conv.
var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
	{types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
	{types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
	{types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
	{types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
}
2651
// shiftOpToSSA maps a shift operator plus the kinds of the shifted
// value and the shift amount to the matching SSA opcode. The opcode
// is specialized on both widths (LshNxM / RshNxM: N-bit value shifted
// by an M-bit amount). Right shifts of unsigned values use the
// logical Rsh*Ux* ops; signed values use the arithmetic Rsh*x* ops.
// Only unsigned shift-amount kinds appear: expr converts a signed
// amount's type to unsigned after emitting a negative-shift check.
var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
	{ir.OLSH, types.TINT8, types.TUINT8}:   ssa.OpLsh8x8,
	{ir.OLSH, types.TUINT8, types.TUINT8}:  ssa.OpLsh8x8,
	{ir.OLSH, types.TINT8, types.TUINT16}:  ssa.OpLsh8x16,
	{ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
	{ir.OLSH, types.TINT8, types.TUINT32}:  ssa.OpLsh8x32,
	{ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
	{ir.OLSH, types.TINT8, types.TUINT64}:  ssa.OpLsh8x64,
	{ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,

	{ir.OLSH, types.TINT16, types.TUINT8}:   ssa.OpLsh16x8,
	{ir.OLSH, types.TUINT16, types.TUINT8}:  ssa.OpLsh16x8,
	{ir.OLSH, types.TINT16, types.TUINT16}:  ssa.OpLsh16x16,
	{ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
	{ir.OLSH, types.TINT16, types.TUINT32}:  ssa.OpLsh16x32,
	{ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
	{ir.OLSH, types.TINT16, types.TUINT64}:  ssa.OpLsh16x64,
	{ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,

	{ir.OLSH, types.TINT32, types.TUINT8}:   ssa.OpLsh32x8,
	{ir.OLSH, types.TUINT32, types.TUINT8}:  ssa.OpLsh32x8,
	{ir.OLSH, types.TINT32, types.TUINT16}:  ssa.OpLsh32x16,
	{ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
	{ir.OLSH, types.TINT32, types.TUINT32}:  ssa.OpLsh32x32,
	{ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
	{ir.OLSH, types.TINT32, types.TUINT64}:  ssa.OpLsh32x64,
	{ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,

	{ir.OLSH, types.TINT64, types.TUINT8}:   ssa.OpLsh64x8,
	{ir.OLSH, types.TUINT64, types.TUINT8}:  ssa.OpLsh64x8,
	{ir.OLSH, types.TINT64, types.TUINT16}:  ssa.OpLsh64x16,
	{ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
	{ir.OLSH, types.TINT64, types.TUINT32}:  ssa.OpLsh64x32,
	{ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
	{ir.OLSH, types.TINT64, types.TUINT64}:  ssa.OpLsh64x64,
	{ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,

	{ir.ORSH, types.TINT8, types.TUINT8}:   ssa.OpRsh8x8,
	{ir.ORSH, types.TUINT8, types.TUINT8}:  ssa.OpRsh8Ux8,
	{ir.ORSH, types.TINT8, types.TUINT16}:  ssa.OpRsh8x16,
	{ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
	{ir.ORSH, types.TINT8, types.TUINT32}:  ssa.OpRsh8x32,
	{ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
	{ir.ORSH, types.TINT8, types.TUINT64}:  ssa.OpRsh8x64,
	{ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,

	{ir.ORSH, types.TINT16, types.TUINT8}:   ssa.OpRsh16x8,
	{ir.ORSH, types.TUINT16, types.TUINT8}:  ssa.OpRsh16Ux8,
	{ir.ORSH, types.TINT16, types.TUINT16}:  ssa.OpRsh16x16,
	{ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
	{ir.ORSH, types.TINT16, types.TUINT32}:  ssa.OpRsh16x32,
	{ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
	{ir.ORSH, types.TINT16, types.TUINT64}:  ssa.OpRsh16x64,
	{ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,

	{ir.ORSH, types.TINT32, types.TUINT8}:   ssa.OpRsh32x8,
	{ir.ORSH, types.TUINT32, types.TUINT8}:  ssa.OpRsh32Ux8,
	{ir.ORSH, types.TINT32, types.TUINT16}:  ssa.OpRsh32x16,
	{ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
	{ir.ORSH, types.TINT32, types.TUINT32}:  ssa.OpRsh32x32,
	{ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
	{ir.ORSH, types.TINT32, types.TUINT64}:  ssa.OpRsh32x64,
	{ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,

	{ir.ORSH, types.TINT64, types.TUINT8}:   ssa.OpRsh64x8,
	{ir.ORSH, types.TUINT64, types.TUINT8}:  ssa.OpRsh64Ux8,
	{ir.ORSH, types.TINT64, types.TUINT16}:  ssa.OpRsh64x16,
	{ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
	{ir.ORSH, types.TINT64, types.TUINT32}:  ssa.OpRsh64x32,
	{ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
	{ir.ORSH, types.TINT64, types.TUINT64}:  ssa.OpRsh64x64,
	{ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
}
2725
2726 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2727 etype1 := s.concreteEtype(t)
2728 etype2 := s.concreteEtype(u)
2729 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2730 if !ok {
2731 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2732 }
2733 return x
2734 }
2735
2736 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2737 if s.config.PtrSize == 4 {
2738 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2739 }
2740 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2741 }
2742
// conv converts the value v, which has type ft (the "from" type), to
// type tt (the "to" type) and returns the converted SSA value. n is
// the IR node being converted; it is threaded through to the
// helper-based conversion paths. Unhandled type combinations are a
// fatal error.
func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
	if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
		// Bool -> uint8: reinterpret the bool as a one-byte integer.
		return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
	}
	if ft.IsInteger() && tt.IsInteger() {
		// Integer <-> integer: copy, truncate, or extend depending on
		// the relative sizes and, when widening, on the signedness of
		// the source. The switch key encodes the (from, to) byte sizes
		// as 10*from+to, e.g. 84 means 8 bytes -> 4 bytes.
		var op ssa.Op
		if tt.Size() == ft.Size() {
			op = ssa.OpCopy
		} else if tt.Size() < ft.Size() {
			// truncation
			switch 10*ft.Size() + tt.Size() {
			case 21:
				op = ssa.OpTrunc16to8
			case 41:
				op = ssa.OpTrunc32to8
			case 42:
				op = ssa.OpTrunc32to16
			case 81:
				op = ssa.OpTrunc64to8
			case 82:
				op = ssa.OpTrunc64to16
			case 84:
				op = ssa.OpTrunc64to32
			default:
				s.Fatalf("weird integer truncation %v -> %v", ft, tt)
			}
		} else if ft.IsSigned() {
			// sign extension
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpSignExt8to16
			case 14:
				op = ssa.OpSignExt8to32
			case 18:
				op = ssa.OpSignExt8to64
			case 24:
				op = ssa.OpSignExt16to32
			case 28:
				op = ssa.OpSignExt16to64
			case 48:
				op = ssa.OpSignExt32to64
			default:
				s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
			}
		} else {
			// zero extension
			switch 10*ft.Size() + tt.Size() {
			case 12:
				op = ssa.OpZeroExt8to16
			case 14:
				op = ssa.OpZeroExt8to32
			case 18:
				op = ssa.OpZeroExt8to64
			case 24:
				op = ssa.OpZeroExt16to32
			case 28:
				op = ssa.OpZeroExt16to64
			case 48:
				op = ssa.OpZeroExt32to64
			default:
				s.Fatalf("weird integer sign extension %v -> %v", ft, tt)
			}
		}
		return s.newValue1(op, tt, v)
	}

	if ft.IsComplex() && tt.IsComplex() {
		// Complex <-> complex: convert the real and imaginary parts
		// independently and reassemble.
		var op ssa.Op
		if ft.Size() == tt.Size() {
			switch ft.Size() {
			case 8:
				op = ssa.OpRound32F
			case 16:
				op = ssa.OpRound64F
			default:
				s.Fatalf("weird complex conversion %v -> %v", ft, tt)
			}
		} else if ft.Size() == 8 && tt.Size() == 16 {
			op = ssa.OpCvt32Fto64F
		} else if ft.Size() == 16 && tt.Size() == 8 {
			op = ssa.OpCvt64Fto32F
		} else {
			s.Fatalf("weird complex conversion %v -> %v", ft, tt)
		}
		ftp := types.FloatForComplex(ft)
		ttp := types.FloatForComplex(tt)
		return s.newValue2(ssa.OpComplexMake, tt,
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
			s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
	}

	if tt.IsComplex() {
		// Non-complex -> complex: convert to the element float type,
		// then use it as the real part with a zero imaginary part.
		et := types.FloatForComplex(tt)
		v = s.conv(n, v, ft, et)
		return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
	}

	if ft.IsFloat() || tt.IsFloat() {
		// Conversions involving floating point: look up the generic
		// two-op recipe, then let arch-specific tables override it.
		conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
		if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
			// 32-bit targets (other than hard-float MIPS) have direct
			// unsigned-32 conversion ops.
			if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}
		if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
			// These targets (and softfloat) can convert uint64 <-> float
			// directly, replacing the OpInvalid entries of the base table.
			if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
				conv = conv1
			}
		}

		if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
			// Hard-float MIPS handles uint32 <-> float via dedicated
			// helper-generated sequences instead of table entries.
			if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
				// uint32 -> float
				if tt.Size() == 4 {
					return s.uint32Tofloat32(n, v, ft, tt)
				}
				if tt.Size() == 8 {
					return s.uint32Tofloat64(n, v, ft, tt)
				}
			} else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
				// float -> uint32
				if ft.Size() == 4 {
					return s.float32ToUint32(n, v, ft, tt)
				}
				if ft.Size() == 8 {
					return s.float64ToUint32(n, v, ft, tt)
				}
			}
		}

		if !ok {
			s.Fatalf("weird float conversion %v -> %v", ft, tt)
		}
		op1, op2, it := conv.op1, conv.op2, conv.intermediateType

		if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
			// Normal case: apply op1 into the intermediate type, then
			// op2 into the destination, skipping any OpCopy steps.
			if op1 == ssa.OpCopy {
				if op2 == ssa.OpCopy {
					return v
				}
				return s.newValueOrSfCall1(op2, tt, v)
			}
			if op2 == ssa.OpCopy {
				return s.newValueOrSfCall1(op1, tt, v)
			}
			return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
		}

		// An OpInvalid entry survived the overrides: uint64 <-> float
		// on a target without direct ops. Use helper-generated code.
		if ft.IsInteger() {
			// uint64 -> float
			if tt.Size() == 4 {
				return s.uint64Tofloat32(n, v, ft, tt)
			}
			if tt.Size() == 8 {
				return s.uint64Tofloat64(n, v, ft, tt)
			}
			s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
		}
		// float -> uint64
		if ft.Size() == 4 {
			return s.float32ToUint64(n, v, ft, tt)
		}
		if ft.Size() == 8 {
			return s.float64ToUint64(n, v, ft, tt)
		}
		s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
		return nil
	}

	s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
	return nil
}
2918
2919
// expr converts the expression n to SSA, adds it to s, and returns
// the resulting SSA value. It is exprCheckPtr with checkptr
// instrumentation of unsafe conversions enabled.
func (s *state) expr(n ir.Node) *ssa.Value {
	return s.exprCheckPtr(n, true)
}
2923
2924 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2925 if ir.HasUniquePos(n) {
2926
2927
2928 s.pushLine(n.Pos())
2929 defer s.popLine()
2930 }
2931
2932 s.stmtList(n.Init())
2933 switch n.Op() {
2934 case ir.OBYTES2STRTMP:
2935 n := n.(*ir.ConvExpr)
2936 slice := s.expr(n.X)
2937 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
2938 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
2939 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
2940 case ir.OSTR2BYTESTMP:
2941 n := n.(*ir.ConvExpr)
2942 str := s.expr(n.X)
2943 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
2944 if !n.NonNil() {
2945
2946
2947
2948 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
2949 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
2950 ptr = s.ternary(cond, ptr, zerobase)
2951 }
2952 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
2953 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
2954 case ir.OCFUNC:
2955 n := n.(*ir.UnaryExpr)
2956 aux := n.X.(*ir.Name).Linksym()
2957
2958
2959 if aux.ABI() != obj.ABIInternal {
2960 s.Fatalf("expected ABIInternal: %v", aux.ABI())
2961 }
2962 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
2963 case ir.ONAME:
2964 n := n.(*ir.Name)
2965 if n.Class == ir.PFUNC {
2966
2967 sym := staticdata.FuncLinksym(n)
2968 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
2969 }
2970 if s.canSSA(n) {
2971 return s.variable(n, n.Type())
2972 }
2973 return s.load(n.Type(), s.addr(n))
2974 case ir.OLINKSYMOFFSET:
2975 n := n.(*ir.LinksymOffsetExpr)
2976 return s.load(n.Type(), s.addr(n))
2977 case ir.ONIL:
2978 n := n.(*ir.NilExpr)
2979 t := n.Type()
2980 switch {
2981 case t.IsSlice():
2982 return s.constSlice(t)
2983 case t.IsInterface():
2984 return s.constInterface(t)
2985 default:
2986 return s.constNil(t)
2987 }
2988 case ir.OLITERAL:
2989 switch u := n.Val(); u.Kind() {
2990 case constant.Int:
2991 i := ir.IntVal(n.Type(), u)
2992 switch n.Type().Size() {
2993 case 1:
2994 return s.constInt8(n.Type(), int8(i))
2995 case 2:
2996 return s.constInt16(n.Type(), int16(i))
2997 case 4:
2998 return s.constInt32(n.Type(), int32(i))
2999 case 8:
3000 return s.constInt64(n.Type(), i)
3001 default:
3002 s.Fatalf("bad integer size %d", n.Type().Size())
3003 return nil
3004 }
3005 case constant.String:
3006 i := constant.StringVal(u)
3007 if i == "" {
3008 return s.constEmptyString(n.Type())
3009 }
3010 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
3011 case constant.Bool:
3012 return s.constBool(constant.BoolVal(u))
3013 case constant.Float:
3014 f, _ := constant.Float64Val(u)
3015 switch n.Type().Size() {
3016 case 4:
3017 return s.constFloat32(n.Type(), f)
3018 case 8:
3019 return s.constFloat64(n.Type(), f)
3020 default:
3021 s.Fatalf("bad float size %d", n.Type().Size())
3022 return nil
3023 }
3024 case constant.Complex:
3025 re, _ := constant.Float64Val(constant.Real(u))
3026 im, _ := constant.Float64Val(constant.Imag(u))
3027 switch n.Type().Size() {
3028 case 8:
3029 pt := types.Types[types.TFLOAT32]
3030 return s.newValue2(ssa.OpComplexMake, n.Type(),
3031 s.constFloat32(pt, re),
3032 s.constFloat32(pt, im))
3033 case 16:
3034 pt := types.Types[types.TFLOAT64]
3035 return s.newValue2(ssa.OpComplexMake, n.Type(),
3036 s.constFloat64(pt, re),
3037 s.constFloat64(pt, im))
3038 default:
3039 s.Fatalf("bad complex size %d", n.Type().Size())
3040 return nil
3041 }
3042 default:
3043 s.Fatalf("unhandled OLITERAL %v", u.Kind())
3044 return nil
3045 }
3046 case ir.OCONVNOP:
3047 n := n.(*ir.ConvExpr)
3048 to := n.Type()
3049 from := n.X.Type()
3050
3051
3052
3053 x := s.expr(n.X)
3054 if to == from {
3055 return x
3056 }
3057
3058
3059
3060
3061
3062 if to.IsPtrShaped() != from.IsPtrShaped() {
3063 return s.newValue2(ssa.OpConvert, to, x, s.mem())
3064 }
3065
3066 v := s.newValue1(ssa.OpCopy, to, x)
3067
3068
3069 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
3070 return v
3071 }
3072
3073
3074 if from.Kind() == to.Kind() {
3075 return v
3076 }
3077
3078
3079 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
3080 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
3081 s.checkPtrAlignment(n, v, nil)
3082 }
3083 return v
3084 }
3085
3086
3087 var mt *types.Type
3088 if buildcfg.Experiment.SwissMap {
3089 mt = types.NewPtr(reflectdata.SwissMapType())
3090 } else {
3091 mt = types.NewPtr(reflectdata.OldMapType())
3092 }
3093 if to.Kind() == types.TMAP && from == mt {
3094 return v
3095 }
3096
3097 types.CalcSize(from)
3098 types.CalcSize(to)
3099 if from.Size() != to.Size() {
3100 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
3101 return nil
3102 }
3103 if etypesign(from.Kind()) != etypesign(to.Kind()) {
3104 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
3105 return nil
3106 }
3107
3108 if base.Flag.Cfg.Instrumenting {
3109
3110
3111
3112 return v
3113 }
3114
3115 if etypesign(from.Kind()) == 0 {
3116 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
3117 return nil
3118 }
3119
3120
3121 return v
3122
3123 case ir.OCONV:
3124 n := n.(*ir.ConvExpr)
3125 x := s.expr(n.X)
3126 return s.conv(n, x, n.X.Type(), n.Type())
3127
3128 case ir.ODOTTYPE:
3129 n := n.(*ir.TypeAssertExpr)
3130 res, _ := s.dottype(n, false)
3131 return res
3132
3133 case ir.ODYNAMICDOTTYPE:
3134 n := n.(*ir.DynamicTypeAssertExpr)
3135 res, _ := s.dynamicDottype(n, false)
3136 return res
3137
3138
3139 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
3140 n := n.(*ir.BinaryExpr)
3141 a := s.expr(n.X)
3142 b := s.expr(n.Y)
3143 if n.X.Type().IsComplex() {
3144 pt := types.FloatForComplex(n.X.Type())
3145 op := s.ssaOp(ir.OEQ, pt)
3146 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
3147 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
3148 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
3149 switch n.Op() {
3150 case ir.OEQ:
3151 return c
3152 case ir.ONE:
3153 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
3154 default:
3155 s.Fatalf("ordered complex compare %v", n.Op())
3156 }
3157 }
3158
3159
3160 op := n.Op()
3161 switch op {
3162 case ir.OGE:
3163 op, a, b = ir.OLE, b, a
3164 case ir.OGT:
3165 op, a, b = ir.OLT, b, a
3166 }
3167 if n.X.Type().IsFloat() {
3168
3169 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3170 }
3171
3172 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3173 case ir.OMUL:
3174 n := n.(*ir.BinaryExpr)
3175 a := s.expr(n.X)
3176 b := s.expr(n.Y)
3177 if n.Type().IsComplex() {
3178 mulop := ssa.OpMul64F
3179 addop := ssa.OpAdd64F
3180 subop := ssa.OpSub64F
3181 pt := types.FloatForComplex(n.Type())
3182 wt := types.Types[types.TFLOAT64]
3183
3184 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3185 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3186 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3187 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3188
3189 if pt != wt {
3190 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3191 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3192 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3193 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3194 }
3195
3196 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3197 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3198
3199 if pt != wt {
3200 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3201 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3202 }
3203
3204 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3205 }
3206
3207 if n.Type().IsFloat() {
3208 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3209 }
3210
3211 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3212
3213 case ir.ODIV:
3214 n := n.(*ir.BinaryExpr)
3215 a := s.expr(n.X)
3216 b := s.expr(n.Y)
3217 if n.Type().IsComplex() {
3218
3219
3220
3221 mulop := ssa.OpMul64F
3222 addop := ssa.OpAdd64F
3223 subop := ssa.OpSub64F
3224 divop := ssa.OpDiv64F
3225 pt := types.FloatForComplex(n.Type())
3226 wt := types.Types[types.TFLOAT64]
3227
3228 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3229 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3230 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3231 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3232
3233 if pt != wt {
3234 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3235 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3236 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3237 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3238 }
3239
3240 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3241 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3242 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3243
3244
3245
3246
3247
3248 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3249 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3250
3251 if pt != wt {
3252 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3253 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3254 }
3255 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3256 }
3257 if n.Type().IsFloat() {
3258 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3259 }
3260 return s.intDivide(n, a, b)
3261 case ir.OMOD:
3262 n := n.(*ir.BinaryExpr)
3263 a := s.expr(n.X)
3264 b := s.expr(n.Y)
3265 return s.intDivide(n, a, b)
3266 case ir.OADD, ir.OSUB:
3267 n := n.(*ir.BinaryExpr)
3268 a := s.expr(n.X)
3269 b := s.expr(n.Y)
3270 if n.Type().IsComplex() {
3271 pt := types.FloatForComplex(n.Type())
3272 op := s.ssaOp(n.Op(), pt)
3273 return s.newValue2(ssa.OpComplexMake, n.Type(),
3274 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3275 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3276 }
3277 if n.Type().IsFloat() {
3278 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3279 }
3280 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3281 case ir.OAND, ir.OOR, ir.OXOR:
3282 n := n.(*ir.BinaryExpr)
3283 a := s.expr(n.X)
3284 b := s.expr(n.Y)
3285 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3286 case ir.OANDNOT:
3287 n := n.(*ir.BinaryExpr)
3288 a := s.expr(n.X)
3289 b := s.expr(n.Y)
3290 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3291 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3292 case ir.OLSH, ir.ORSH:
3293 n := n.(*ir.BinaryExpr)
3294 a := s.expr(n.X)
3295 b := s.expr(n.Y)
3296 bt := b.Type
3297 if bt.IsSigned() {
3298 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3299 s.check(cmp, ir.Syms.Panicshift)
3300 bt = bt.ToUnsigned()
3301 }
3302 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3303 case ir.OANDAND, ir.OOROR:
3304
3305
3306
3307
3308
3309
3310
3311
3312
3313
3314
3315
3316
3317 n := n.(*ir.LogicalExpr)
3318 el := s.expr(n.X)
3319 s.vars[n] = el
3320
3321 b := s.endBlock()
3322 b.Kind = ssa.BlockIf
3323 b.SetControl(el)
3324
3325
3326
3327
3328
3329 bRight := s.f.NewBlock(ssa.BlockPlain)
3330 bResult := s.f.NewBlock(ssa.BlockPlain)
3331 if n.Op() == ir.OANDAND {
3332 b.AddEdgeTo(bRight)
3333 b.AddEdgeTo(bResult)
3334 } else if n.Op() == ir.OOROR {
3335 b.AddEdgeTo(bResult)
3336 b.AddEdgeTo(bRight)
3337 }
3338
3339 s.startBlock(bRight)
3340 er := s.expr(n.Y)
3341 s.vars[n] = er
3342
3343 b = s.endBlock()
3344 b.AddEdgeTo(bResult)
3345
3346 s.startBlock(bResult)
3347 return s.variable(n, types.Types[types.TBOOL])
3348 case ir.OCOMPLEX:
3349 n := n.(*ir.BinaryExpr)
3350 r := s.expr(n.X)
3351 i := s.expr(n.Y)
3352 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3353
3354
3355 case ir.ONEG:
3356 n := n.(*ir.UnaryExpr)
3357 a := s.expr(n.X)
3358 if n.Type().IsComplex() {
3359 tp := types.FloatForComplex(n.Type())
3360 negop := s.ssaOp(n.Op(), tp)
3361 return s.newValue2(ssa.OpComplexMake, n.Type(),
3362 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3363 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3364 }
3365 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3366 case ir.ONOT, ir.OBITNOT:
3367 n := n.(*ir.UnaryExpr)
3368 a := s.expr(n.X)
3369 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3370 case ir.OIMAG, ir.OREAL:
3371 n := n.(*ir.UnaryExpr)
3372 a := s.expr(n.X)
3373 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3374 case ir.OPLUS:
3375 n := n.(*ir.UnaryExpr)
3376 return s.expr(n.X)
3377
3378 case ir.OADDR:
3379 n := n.(*ir.AddrExpr)
3380 return s.addr(n.X)
3381
3382 case ir.ORESULT:
3383 n := n.(*ir.ResultExpr)
3384 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3385 panic("Expected to see a previous call")
3386 }
3387 which := n.Index
3388 if which == -1 {
3389 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3390 }
3391 return s.resultOfCall(s.prevCall, which, n.Type())
3392
3393 case ir.ODEREF:
3394 n := n.(*ir.StarExpr)
3395 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3396 return s.load(n.Type(), p)
3397
3398 case ir.ODOT:
3399 n := n.(*ir.SelectorExpr)
3400 if n.X.Op() == ir.OSTRUCTLIT {
3401
3402
3403
3404 if !ir.IsZero(n.X) {
3405 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3406 }
3407 return s.zeroVal(n.Type())
3408 }
3409
3410
3411
3412
3413 if ir.IsAddressable(n) && !s.canSSA(n) {
3414 p := s.addr(n)
3415 return s.load(n.Type(), p)
3416 }
3417 v := s.expr(n.X)
3418 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3419
3420 case ir.ODOTPTR:
3421 n := n.(*ir.SelectorExpr)
3422 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3423 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3424 return s.load(n.Type(), p)
3425
3426 case ir.OINDEX:
3427 n := n.(*ir.IndexExpr)
3428 switch {
3429 case n.X.Type().IsString():
3430 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3431
3432
3433
3434 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3435 }
3436 a := s.expr(n.X)
3437 i := s.expr(n.Index)
3438 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3439 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3440 ptrtyp := s.f.Config.Types.BytePtr
3441 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3442 if ir.IsConst(n.Index, constant.Int) {
3443 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3444 } else {
3445 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3446 }
3447 return s.load(types.Types[types.TUINT8], ptr)
3448 case n.X.Type().IsSlice():
3449 p := s.addr(n)
3450 return s.load(n.X.Type().Elem(), p)
3451 case n.X.Type().IsArray():
3452 if ssa.CanSSA(n.X.Type()) {
3453
3454 bound := n.X.Type().NumElem()
3455 a := s.expr(n.X)
3456 i := s.expr(n.Index)
3457 if bound == 0 {
3458
3459
3460 z := s.constInt(types.Types[types.TINT], 0)
3461 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3462
3463
3464 return s.zeroVal(n.Type())
3465 }
3466 len := s.constInt(types.Types[types.TINT], bound)
3467 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3468 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3469 }
3470 p := s.addr(n)
3471 return s.load(n.X.Type().Elem(), p)
3472 default:
3473 s.Fatalf("bad type for index %v", n.X.Type())
3474 return nil
3475 }
3476
3477 case ir.OLEN, ir.OCAP:
3478 n := n.(*ir.UnaryExpr)
3479
3480
3481 a := s.expr(n.X)
3482 t := n.X.Type()
3483 switch {
3484 case t.IsSlice():
3485 op := ssa.OpSliceLen
3486 if n.Op() == ir.OCAP {
3487 op = ssa.OpSliceCap
3488 }
3489 return s.newValue1(op, types.Types[types.TINT], a)
3490 case t.IsString():
3491 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3492 case t.IsMap(), t.IsChan():
3493 return s.referenceTypeBuiltin(n, a)
3494 case t.IsArray():
3495 return s.constInt(types.Types[types.TINT], t.NumElem())
3496 case t.IsPtr() && t.Elem().IsArray():
3497 return s.constInt(types.Types[types.TINT], t.Elem().NumElem())
3498 default:
3499 s.Fatalf("bad type in len/cap: %v", t)
3500 return nil
3501 }
3502
3503 case ir.OSPTR:
3504 n := n.(*ir.UnaryExpr)
3505 a := s.expr(n.X)
3506 if n.X.Type().IsSlice() {
3507 if n.Bounded() {
3508 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3509 }
3510 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3511 } else {
3512 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3513 }
3514
3515 case ir.OITAB:
3516 n := n.(*ir.UnaryExpr)
3517 a := s.expr(n.X)
3518 return s.newValue1(ssa.OpITab, n.Type(), a)
3519
3520 case ir.OIDATA:
3521 n := n.(*ir.UnaryExpr)
3522 a := s.expr(n.X)
3523 return s.newValue1(ssa.OpIData, n.Type(), a)
3524
3525 case ir.OMAKEFACE:
3526 n := n.(*ir.BinaryExpr)
3527 tab := s.expr(n.X)
3528 data := s.expr(n.Y)
3529 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3530
3531 case ir.OSLICEHEADER:
3532 n := n.(*ir.SliceHeaderExpr)
3533 p := s.expr(n.Ptr)
3534 l := s.expr(n.Len)
3535 c := s.expr(n.Cap)
3536 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3537
3538 case ir.OSTRINGHEADER:
3539 n := n.(*ir.StringHeaderExpr)
3540 p := s.expr(n.Ptr)
3541 l := s.expr(n.Len)
3542 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3543
3544 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3545 n := n.(*ir.SliceExpr)
3546 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3547 v := s.exprCheckPtr(n.X, !check)
3548 var i, j, k *ssa.Value
3549 if n.Low != nil {
3550 i = s.expr(n.Low)
3551 }
3552 if n.High != nil {
3553 j = s.expr(n.High)
3554 }
3555 if n.Max != nil {
3556 k = s.expr(n.Max)
3557 }
3558 p, l, c := s.slice(v, i, j, k, n.Bounded())
3559 if check {
3560
3561 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3562 }
3563 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3564
3565 case ir.OSLICESTR:
3566 n := n.(*ir.SliceExpr)
3567 v := s.expr(n.X)
3568 var i, j *ssa.Value
3569 if n.Low != nil {
3570 i = s.expr(n.Low)
3571 }
3572 if n.High != nil {
3573 j = s.expr(n.High)
3574 }
3575 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3576 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3577
3578 case ir.OSLICE2ARRPTR:
3579
3580
3581
3582
3583 n := n.(*ir.ConvExpr)
3584 v := s.expr(n.X)
3585 nelem := n.Type().Elem().NumElem()
3586 arrlen := s.constInt(types.Types[types.TINT], nelem)
3587 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3588 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3589 op := ssa.OpSlicePtr
3590 if nelem == 0 {
3591 op = ssa.OpSlicePtrUnchecked
3592 }
3593 return s.newValue1(op, n.Type(), v)
3594
3595 case ir.OCALLFUNC:
3596 n := n.(*ir.CallExpr)
3597 if ir.IsIntrinsicCall(n) {
3598 return s.intrinsicCall(n)
3599 }
3600 fallthrough
3601
3602 case ir.OCALLINTER:
3603 n := n.(*ir.CallExpr)
3604 return s.callResult(n, callNormal)
3605
3606 case ir.OGETG:
3607 n := n.(*ir.CallExpr)
3608 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3609
3610 case ir.OGETCALLERSP:
3611 n := n.(*ir.CallExpr)
3612 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3613
3614 case ir.OAPPEND:
3615 return s.append(n.(*ir.CallExpr), false)
3616
3617 case ir.OMIN, ir.OMAX:
3618 return s.minMax(n.(*ir.CallExpr))
3619
3620 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3621
3622
3623
3624 n := n.(*ir.CompLitExpr)
3625 if !ir.IsZero(n) {
3626 s.Fatalf("literal with nonzero value in SSA: %v", n)
3627 }
3628 return s.zeroVal(n.Type())
3629
3630 case ir.ONEW:
3631 n := n.(*ir.UnaryExpr)
3632 var rtype *ssa.Value
3633 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3634 rtype = s.expr(x.RType)
3635 }
3636 return s.newObject(n.Type().Elem(), rtype)
3637
3638 case ir.OUNSAFEADD:
3639 n := n.(*ir.BinaryExpr)
3640 ptr := s.expr(n.X)
3641 len := s.expr(n.Y)
3642
3643
3644
3645 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3646
3647 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3648
3649 default:
3650 s.Fatalf("unhandled expr %v", n.Op())
3651 return nil
3652 }
3653 }
3654
3655 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3656 aux := c.Aux.(*ssa.AuxCall)
3657 pa := aux.ParamAssignmentForResult(which)
3658
3659
3660 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3661 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3662 return s.rawLoad(t, addr)
3663 }
3664 return s.newValue1I(ssa.OpSelectN, t, which, c)
3665 }
3666
// resultAddrOfCall returns the address of result number which (of type t)
// of the call c. A result passed in memory is addressed directly in the
// call's result area; a register result is first spilled to a stack
// temporary so that an address can be taken.
func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
	aux := c.Aux.(*ssa.AuxCall)
	pa := aux.ParamAssignmentForResult(which)
	if len(pa.Registers) == 0 {
		// Memory result: its address is just the SelectNAddr projection.
		return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
	}
	// Register result: store it into a fresh temporary and return the
	// temporary's address. The store is threaded through memVar so later
	// loads observe it.
	_, addr := s.temp(c.Pos, t)
	rval := s.newValue1I(ssa.OpSelectN, t, which, c)
	s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
	return addr
}
3678
3679
3680
3681
3682
3683
3684
3685
3686
// append converts an OAPPEND node n into SSA.
//
// If inplace is false, append returns the SSA value for the resulting
// slice (made with OpSliceMake from the final ptr/len/cap).
// If inplace is true, the slice operand n.Args[0] is addressable: the
// result is written back through that address (the length always, the
// pointer and capacity only on the grow path) and append returns nil.
//
// The generated code has this shape:
//
//	p, l, c := slice's ptr/len/cap
//	newlen := l + number of appended elements
//	if uint(c) < uint(newlen) {        // unsigned compare, branch unlikely
//	    p, l, c = growslice(...)       // runtime call
//	}
//	store the new elements at p[newlen-nargs : newlen]
func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
	et := n.Type().Elem()
	pt := types.NewPtr(et)

	// Evaluate the slice being appended to. In the in-place case we need
	// its address as well, so the updated header can be stored back.
	sn := n.Args[0]
	var slice, addr *ssa.Value
	if inplace {
		addr = s.addr(sn)
		slice = s.load(n.Type(), addr)
	} else {
		slice = s.expr(sn)
	}

	// grow is taken when capacity is insufficient; assign is the join
	// point where the new elements are written.
	grow := s.f.NewBlock(ssa.BlockPlain)
	assign := s.f.NewBlock(ssa.BlockPlain)

	// Decompose the slice header.
	p := s.newValue1(ssa.OpSlicePtr, pt, slice)
	l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
	c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)

	// newlen = oldlen + number of appended arguments.
	nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
	oldLen := l
	l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)

	// Need to grow iff uint(cap) < uint(newlen) (unsigned comparison).
	cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)

	// Record ptr/len (and cap when the caller needs the full result) as
	// phi-able variables so the two paths can merge at assign.
	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	if !inplace {
		s.vars[capVar] = c
	}

	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.Likely = ssa.BranchUnlikely
	b.SetControl(cmp)
	b.AddEdgeTo(grow)
	b.AddEdgeTo(assign)

	// Optimization: for a small, non-escaping, non-in-place append, try to
	// satisfy the first growth from a fixed-size stack backing array
	// instead of calling growslice. The backing array is used at most once
	// per append site (guarded by the `used` flag) and only when growing
	// from length 0, so earlier results cannot alias it.
	maxStackSize := int64(base.Debug.VariableMakeThreshold)
	if !inplace && n.Esc() == ir.EscNone && et.Size() > 0 && et.Size() <= maxStackSize && base.Flag.N == 0 && base.VariableMakeHash.MatchPos(n.Pos(), nil) && !s.appendTargets[sn] {
		// Mark this append target so the optimization is applied to it
		// only once per function.
		if s.appendTargets == nil {
			s.appendTargets = map[ir.Node]bool{}
		}
		s.appendTargets[sn] = true

		// K = number of elements that fit in the stack buffer.
		K := maxStackSize / et.Size()
		KT := types.NewArray(et, K)
		KT.SetNoalg(true)
		types.CalcArraySize(KT)
		// Wrap the array in a struct with a zero-length uintptr array
		// first, presumably to force pointer alignment of the backing
		// store — TODO(review): confirm the alignment intent.
		align := types.NewArray(types.Types[types.TUINTPTR], 0)
		types.CalcArraySize(align)
		storeTyp := types.NewStruct([]*types.Field{
			{Sym: types.BlankSym, Type: align},
			{Sym: types.BlankSym, Type: KT},
		})
		storeTyp.SetNoalg(true)
		types.CalcStructSize(storeTyp)

		usedTestBlock := s.f.NewBlock(ssa.BlockPlain)
		oldLenTestBlock := s.f.NewBlock(ssa.BlockPlain)
		bodyBlock := s.f.NewBlock(ssa.BlockPlain)
		growSlice := s.f.NewBlock(ssa.BlockPlain)

		// `used`: has the stack buffer already been handed out?
		// Initialized to false in the entry block.
		tBool := types.Types[types.TBOOL]
		used := typecheck.TempAt(n.Pos(), s.curfn, tBool)
		s.defvars[s.f.Entry.ID][used] = s.constBool(false)

		// The stack backing store itself; address-taken because the
		// resulting slice points into it.
		tInt := types.Types[types.TINT]
		backingStore := typecheck.TempAt(n.Pos(), s.curfn, storeTyp)
		backingStore.SetAddrtaken(true)

		// grow: does the requested length fit in the stack buffer?
		s.startBlock(grow)
		kTest := s.newValue2(s.ssaOp(ir.OLE, tInt), tBool, l, s.constInt(tInt, K))
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(kTest)
		b.AddEdgeTo(usedTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Is the stack buffer still unused?
		s.startBlock(usedTestBlock)
		usedTest := s.newValue1(ssa.OpNot, tBool, s.expr(used))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(usedTest)
		b.AddEdgeTo(oldLenTestBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Only grow from an empty slice, so no old elements need copying.
		s.startBlock(oldLenTestBlock)
		oldLenTest := s.newValue2(s.ssaOp(ir.OEQ, tInt), tBool, oldLen, s.constInt(tInt, 0))
		b = s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(oldLenTest)
		b.AddEdgeTo(bodyBlock)
		b.AddEdgeTo(growSlice)
		b.Likely = ssa.BranchLikely

		// Hand out the zeroed stack buffer as the new backing store.
		s.startBlock(bodyBlock)
		if et.HasPointers() {
			s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, backingStore, s.mem())
		}
		addr := s.addr(backingStore)
		s.zero(storeTyp, addr)

		// New slice header: stack buffer pointer, requested length, cap K.
		s.vars[ptrVar] = addr
		s.vars[lenVar] = l
		s.vars[capVar] = s.constInt(tInt, K)

		// Mark the buffer consumed and join the assign block.
		s.assign(used, s.constBool(true), false, 0)
		b = s.endBlock()
		b.AddEdgeTo(assign)

		// The runtime growslice call below now hangs off growSlice.
		grow = growSlice
	}

	// grow: call runtime growslice and re-decompose the returned slice.
	s.startBlock(grow)
	taddr := s.expr(n.Fun)
	r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)

	p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
	l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
	c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])

	s.vars[ptrVar] = p
	s.vars[lenVar] = l
	s.vars[capVar] = c
	if inplace {
		if sn.Op() == ir.ONAME {
			sn := sn.(*ir.Name)
			if sn.Class != ir.PEXTERN {
				// Growing invalidates the old contents of a stack/local
				// slice variable; mark it redefined for liveness.
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
			}
		}
		// Write the new cap and ptr back through the slice's address.
		// (The length is written on both paths, below at assign.)
		capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
		s.store(types.Types[types.TINT], capaddr, c)
		s.store(pt, addr, p)
	}

	b = s.endBlock()
	b.AddEdgeTo(assign)

	// assign: merge the grow/no-grow paths via phis on ptr/len/cap.
	s.startBlock(assign)
	p = s.variable(ptrVar, pt)
	l = s.variable(lenVar, types.Types[types.TINT])
	if !inplace {
		c = s.variable(capVar, types.Types[types.TINT])
	}

	if inplace {
		// Store the new length back through the slice's address.
		lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
		s.store(types.Types[types.TINT], lenaddr, l)
	}

	// Evaluate the arguments being appended. SSA-able values are held as
	// values and stored later; non-SSA-able values are evaluated to an
	// address and moved.
	type argRec struct {
		// v is either the value (store==true) or the address (store==false)
		// of the argument.
		v     *ssa.Value
		store bool
	}
	args := make([]argRec, 0, len(n.Args[1:]))
	for _, n := range n.Args[1:] {
		if ssa.CanSSA(n.Type()) {
			args = append(args, argRec{v: s.expr(n), store: true})
		} else {
			v := s.addr(n)
			args = append(args, argRec{v: v})
		}
	}

	// Write the new elements starting at p[newlen-nargs].
	oldLen = s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
	p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
	for i, arg := range args {
		addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
		if arg.store {
			s.storeType(et, addr, arg.v, 0, true)
		} else {
			s.move(et, addr, arg.v)
		}
	}

	// The phi variables are dead past this point; drop them so they do
	// not leak into unrelated phis.
	delete(s.vars, ptrVar)
	delete(s.vars, lenVar)
	if !inplace {
		delete(s.vars, capVar)
	}

	// In-place append has already written its result back; otherwise
	// rebuild the slice value from the merged components.
	if inplace {
		return nil
	}
	return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
}
3970
3971
// minMax converts an OMIN/OMAX builtin call n into SSA.
func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
	// min/max is variadic; fold applies the pairwise operation op
	// left-to-right across all arguments.
	fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
		x := s.expr(n.Args[0])
		for _, arg := range n.Args[1:] {
			x = op(x, s.expr(arg))
		}
		return x
	}

	typ := n.Type()

	if typ.IsFloat() || typ.IsString() {
		// Floats and strings are not handled by the generic
		// compare-and-select code below: floats have NaN/signed-zero
		// semantics and strings need runtime comparison. Use dedicated
		// SSA ops where the target supports them, else runtime helpers.
		if typ.IsFloat() {
			// Architectures with native float min/max instructions.
			hasIntrinsic := false
			switch Arch.LinkArch.Family {
			case sys.AMD64, sys.ARM64, sys.Loong64, sys.RISCV64:
				hasIntrinsic = true
			case sys.PPC64:
				hasIntrinsic = buildcfg.GOPPC64 >= 9
			}

			if hasIntrinsic {
				var op ssa.Op
				switch {
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
					op = ssa.OpMin64F
				case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
					op = ssa.OpMax64F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
					op = ssa.OpMin32F
				case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
					op = ssa.OpMax32F
				}
				return fold(func(x, a *ssa.Value) *ssa.Value {
					return s.newValue2(op, typ, x, a)
				})
			}
		}
		// Fallback: pairwise calls to the runtime helper for this
		// type/op combination (fmin32/fmax32/fmin64/fmax64/strmin/strmax).
		var name string
		switch typ.Kind() {
		case types.TFLOAT32:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin32"
			case ir.OMAX:
				name = "fmax32"
			}
		case types.TFLOAT64:
			switch n.Op() {
			case ir.OMIN:
				name = "fmin64"
			case ir.OMAX:
				name = "fmax64"
			}
		case types.TSTRING:
			switch n.Op() {
			case ir.OMIN:
				name = "strmin"
			case ir.OMAX:
				name = "strmax"
			}
		}
		fn := typecheck.LookupRuntimeFunc(name)

		return fold(func(x, a *ssa.Value) *ssa.Value {
			return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
		})
	}

	if typ.IsInteger() {
		// RISC-V with the Zbb extension (GORISCV64 >= 22) has native
		// 64-bit integer min/max instructions.
		if Arch.LinkArch.Family == sys.RISCV64 && buildcfg.GORISCV64 >= 22 && typ.Size() == 8 {
			var op ssa.Op
			switch {
			case typ.IsSigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64
			case typ.IsSigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64
			case typ.IsUnsigned() && n.Op() == ir.OMIN:
				op = ssa.OpMin64u
			case typ.IsUnsigned() && n.Op() == ir.OMAX:
				op = ssa.OpMax64u
			}
			return fold(func(x, a *ssa.Value) *ssa.Value {
				return s.newValue2(op, typ, x, a)
			})
		}
	}

	// Generic path: compare-and-select via a small CFG diamond.
	lt := s.ssaOp(ir.OLT, typ)

	return fold(func(x, a *ssa.Value) *ssa.Value {
		switch n.Op() {
		case ir.OMIN:
			// a < x ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
		case ir.OMAX:
			// x < a ? a : x
			return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
		}
		panic("unreachable")
	})
}
4085
4086
// ternary emits cond ? x : y as a small if/else CFG diamond whose result
// is merged with a phi (via the variable machinery). Both x and y must
// already be computed; only the selection is conditional.
func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
	// A fresh marker node keys the phi variable for this expression.
	ternaryVar := ssaMarker("ternary")

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bEnd := s.f.NewBlock(ssa.BlockPlain)

	// Branch on cond: then-edge first, else-edge second.
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cond)
	b.AddEdgeTo(bThen)
	b.AddEdgeTo(bElse)

	s.startBlock(bThen)
	s.vars[ternaryVar] = x
	s.endBlock().AddEdgeTo(bEnd)

	s.startBlock(bElse)
	s.vars[ternaryVar] = y
	s.endBlock().AddEdgeTo(bEnd)

	// Merge: reading the variable here generates the phi; then drop the
	// temporary variable so it can't leak into later phis.
	s.startBlock(bEnd)
	r := s.variable(ternaryVar, x.Type)
	delete(s.vars, ternaryVar)
	return r
}
4115
4116
4117
4118
4119
// condBranch evaluates the boolean expression cond and branches to yes if
// true and to no if false. Short-circuit operators (&&, ||) and negation
// are lowered structurally, recursing on their operands, so that each
// side gets its own branch rather than materializing a boolean value.
// likely is a branch prediction hint: >0 means cond is likely true,
// <0 likely false, 0 unknown.
func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
	switch cond.Op() {
	case ir.OANDAND:
		// X && Y: branch on X; only if true, fall into mid and branch on Y.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// The hint for X is clamped at 0: even if the whole && is
		// unlikely true, X alone may still usually be true (with Y
		// false), so a negative hint must not propagate to X.
		s.condBranch(cond.X, mid, no, max(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.OOROR:
		// X || Y: branch on X; only if false, fall into mid and branch on Y.
		cond := cond.(*ir.LogicalExpr)
		mid := s.f.NewBlock(ssa.BlockPlain)
		s.stmtList(cond.Init())
		// Mirror of the && case: clamp the hint for X at 0, since a
		// positive hint for the whole || need not apply to X alone.
		s.condBranch(cond.X, yes, mid, min(likely, 0))
		s.startBlock(mid)
		s.condBranch(cond.Y, yes, no, likely)
		return

	case ir.ONOT:
		// !X: swap the targets and invert the hint.
		cond := cond.(*ir.UnaryExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, no, yes, -likely)
		return
	case ir.OCONVNOP:
		// No-op conversion: branch on the underlying expression.
		cond := cond.(*ir.ConvExpr)
		s.stmtList(cond.Init())
		s.condBranch(cond.X, yes, no, likely)
		return
	}
	// Base case: evaluate cond to a value and emit a two-way branch.
	c := s.expr(cond)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(c)
	b.Likely = ssa.BranchPrediction(likely)
	b.AddEdgeTo(yes)
	b.AddEdgeTo(no)
}
4166
// skipMask is a bit set selecting which components of a slice header
// (pointer, length, capacity) an assignment may skip writing.
type skipMask uint8

const (
	skipPtr skipMask = 1 << iota
	skipLen
	skipCap
)
4174
4175
4176
4177
4178
4179
4180
// assign does left = right, assuming the source and destination do not
// overlap. It is a convenience wrapper around assignWhichMayOverlap with
// mayOverlap=false; see that function for the meaning of deref and skip.
func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
	s.assignWhichMayOverlap(left, right, deref, skip, false)
}
// assignWhichMayOverlap does left = right.
// If deref is true, right is the address of the data to assign (a move,
// or a zeroing when right is nil); otherwise right is the value itself.
// skip selects slice-header components that need not be written.
// mayOverlap indicates the source and destination memory may overlap,
// which is forwarded to the move.
func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
	// Assignment to the blank identifier: right was already evaluated by
	// the caller; nothing to store.
	if left.Op() == ir.ONAME && ir.IsBlank(left) {
		return
	}
	t := left.Type()
	types.CalcSize(t)
	if s.canSSA(left) {
		if deref {
			s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
		}
		if left.Op() == ir.ODOT {
			// Assigning to a field of an SSA-able struct: rebuild the
			// whole struct value with the one field replaced, then assign
			// the new struct to the base expression.
			left := left.(*ir.SelectorExpr)
			t := left.X.Type()
			nf := t.NumFields()
			idx := fieldIdx(left)

			// Current value of the enclosing struct.
			old := s.expr(left.X)

			// New struct value, built field by field.
			new := s.newValue0(ssa.OpStructMake, t)

			// Copy every field from old except the one being assigned.
			for i := 0; i < nf; i++ {
				if i == idx {
					new.AddArg(right)
				} else {
					new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
				}
			}

			// Recursively assign the rebuilt struct (handles nested
			// selectors like a.b.c = x).
			s.assign(left.X, new, false, 0)

			return
		}
		if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
			// Assigning to an element of an SSA-able array. Only arrays
			// of length 0 or 1 can be SSA'd.
			left := left.(*ir.IndexExpr)
			s.pushLine(left.Pos())
			defer s.popLine()

			t := left.X.Type()
			n := t.NumElem()

			// The index must still be evaluated for its side effects
			// and bounds check, even for a zero-length array.
			i := s.expr(left.Index)
			if n == 0 {
				// Any index into a [0]T array is out of range; emit an
				// unconditionally failing bounds check.
				z := s.constInt(types.Types[types.TINT], 0)
				s.boundsCheck(z, z, ssa.BoundsIndex, false)
				return
			}
			if n != 1 {
				s.Fatalf("assigning to non-1-length array")
			}
			// [1]T: bounds-check the index, then rebuild the whole
			// one-element array value and assign it to the base.
			len := s.constInt(types.Types[types.TINT], 1)
			s.boundsCheck(i, len, ssa.BoundsIndex, false)
			v := s.newValue1(ssa.OpArrayMake1, t, right)
			s.assign(left.X, v, false, 0)
			return
		}
		// Plain SSA-able variable: record the new value and track it for
		// debug-info naming.
		left := left.(*ir.Name)
		s.vars[left] = right
		s.addNamedValue(left, right)
		return
	}

	// Not SSA-able: the assignment is a store to memory.
	// A full overwrite of a stack object is marked with VarDef so
	// liveness knows the old contents are dead (needed when t has
	// pointers, or when the variable is a stack-slot merge candidate).
	if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && (t.HasPointers() || ssa.IsMergeCandidate(base)) {
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
	}

	// Compute the destination address after the VarDef.
	addr := s.addr(left)
	if ir.IsReflectHeaderDataField(left) {
		// reflect.SliceHeader/StringHeader .Data is declared uintptr but
		// must be stored as a pointer so the GC sees the write.
		t = types.Types[types.TUNSAFEPTR]
	}
	if deref {
		// right is an address (or nil, meaning zero the destination).
		if right == nil {
			s.zero(t, addr)
		} else {
			s.moveWhichMayOverlap(t, addr, right, mayOverlap)
		}
		return
	}
	// right is a value; store it (with write barrier as needed).
	s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
}
4292
4293
4294 func (s *state) zeroVal(t *types.Type) *ssa.Value {
4295 switch {
4296 case t.IsInteger():
4297 switch t.Size() {
4298 case 1:
4299 return s.constInt8(t, 0)
4300 case 2:
4301 return s.constInt16(t, 0)
4302 case 4:
4303 return s.constInt32(t, 0)
4304 case 8:
4305 return s.constInt64(t, 0)
4306 default:
4307 s.Fatalf("bad sized integer type %v", t)
4308 }
4309 case t.IsFloat():
4310 switch t.Size() {
4311 case 4:
4312 return s.constFloat32(t, 0)
4313 case 8:
4314 return s.constFloat64(t, 0)
4315 default:
4316 s.Fatalf("bad sized float type %v", t)
4317 }
4318 case t.IsComplex():
4319 switch t.Size() {
4320 case 8:
4321 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4322 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4323 case 16:
4324 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4325 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4326 default:
4327 s.Fatalf("bad sized complex type %v", t)
4328 }
4329
4330 case t.IsString():
4331 return s.constEmptyString(t)
4332 case t.IsPtrShaped():
4333 return s.constNil(t)
4334 case t.IsBoolean():
4335 return s.constBool(false)
4336 case t.IsInterface():
4337 return s.constInterface(t)
4338 case t.IsSlice():
4339 return s.constSlice(t)
4340 case t.IsStruct():
4341 n := t.NumFields()
4342 v := s.entryNewValue0(ssa.OpStructMake, t)
4343 for i := 0; i < n; i++ {
4344 v.AddArg(s.zeroVal(t.FieldType(i)))
4345 }
4346 return v
4347 case t.IsArray():
4348 switch t.NumElem() {
4349 case 0:
4350 return s.entryNewValue0(ssa.OpArrayMake0, t)
4351 case 1:
4352 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4353 }
4354 }
4355 s.Fatalf("zero for type %v not implemented", t)
4356 return nil
4357 }
4358
// callKind distinguishes how a call expression is being compiled:
// a normal call, a deferred call (heap- or stack-allocated record),
// a goroutine start, or a tail call.
type callKind int8

const (
	callNormal callKind = iota
	callDefer
	callDeferStack
	callGo
	callTail
)
4368
// sfRtCallDef describes the runtime helper that implements a
// floating-point SSA op in softfloat mode.
type sfRtCallDef struct {
	rtfn  *obj.LSym  // runtime function to call
	rtype types.Kind // kind of the call's result
}

// softFloatOps maps float SSA ops to their runtime replacements.
// Populated by softfloatInit; consulted by sfcall.
var softFloatOps map[ssa.Op]sfRtCallDef
4375
// softfloatInit populates softFloatOps, the table mapping floating-point
// SSA ops to the runtime calls that implement them in softfloat mode.
// Several entries are deliberately "wrong-looking" and are compensated in
// sfcall: Sub maps to fadd (sfcall negates the second operand), and
// Less/Leq map to fgt/fge (sfcall swaps the operands); Neq maps to feq
// and sfcall negates the result.
func softfloatInit() {
	// Float ops with runtime call equivalents, handled in sfcall.
	softFloatOps = map[ssa.Op]sfRtCallDef{
		ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
		ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
		ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
		ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
		ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
		ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},

		ssa.OpEq64F:   {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpEq32F:   {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpNeq64F:  {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
		ssa.OpNeq32F:  {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
		ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
		ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
		ssa.OpLeq64F:  {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
		ssa.OpLeq32F:  {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},

		ssa.OpCvt32to32F:  {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
		ssa.OpCvt32Fto32:  {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
		ssa.OpCvt64to32F:  {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64:  {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
		ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
		ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
		ssa.OpCvt32to64F:  {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32:  {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
		ssa.OpCvt64to64F:  {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64:  {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
		ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
		ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
		ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
		ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
	}
}
4413
4414
4415
// sfcall replaces the floating-point SSA op with a call to the
// corresponding softfloat runtime helper, if one exists. It returns the
// result value and true on success, or (nil, false) if op has no
// softfloat equivalent.
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
	// f2i maps a float type to the same-width unsigned integer type;
	// softfloat helpers take and return floats as raw bit patterns.
	f2i := func(t *types.Type) *types.Type {
		switch t.Kind() {
		case types.TFLOAT32:
			return types.Types[types.TUINT32]
		case types.TFLOAT64:
			return types.Types[types.TUINT64]
		}
		return t
	}

	if callDef, ok := softFloatOps[op]; ok {
		// Compensate for the table's op sharing (see softfloatInit):
		switch op {
		case ssa.OpLess32F,
			ssa.OpLess64F,
			ssa.OpLeq32F,
			ssa.OpLeq64F:
			// x < y is computed as y > x; x <= y as y >= x.
			args[0], args[1] = args[1], args[0]
		case ssa.OpSub32F,
			ssa.OpSub64F:
			// x - y is computed as x + (-y).
			args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
		}

		// Reinterpret float arguments as same-width integers (bit copy,
		// not a conversion).
		for i, a := range args {
			if a.Type.IsFloat() {
				args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
			}
		}

		// Call the helper; reinterpret a float result back from its
		// integer bit pattern.
		rt := types.Types[callDef.rtype]
		result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
		if rt.IsFloat() {
			result = s.newValue1(ssa.OpCopy, rt, result)
		}
		// Neq is implemented as !Eq.
		if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
			result = s.newValue1(ssa.OpNot, result.Type, result)
		}
		return result, true
	}
	return nil, false
}
4459
4460
4461 func (s *state) split(v *ssa.Value) (*ssa.Value, *ssa.Value) {
4462 p0 := s.newValue1(ssa.OpSelect0, v.Type.FieldType(0), v)
4463 p1 := s.newValue1(ssa.OpSelect1, v.Type.FieldType(1), v)
4464 return p0, p1
4465 }
4466
4467
4468 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
4469 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
4470 if ssa.IntrinsicsDebug > 0 {
4471 x := v
4472 if x == nil {
4473 x = s.mem()
4474 }
4475 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
4476 x = x.Args[0]
4477 }
4478 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
4479 }
4480 return v
4481 }
4482
4483
4484 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
4485 args := make([]*ssa.Value, len(n.Args))
4486 for i, n := range n.Args {
4487 args[i] = s.expr(n)
4488 }
4489 return args
4490 }
4491
4492
4493
4494
4495
4496
4497
// openDeferRecord records a deferred call n for open-coded defers:
// it evaluates and saves the deferred function value now, appends an
// openDeferInfo entry, and sets this defer's bit in the deferBits
// tracking variable. Open-coded defers only support argument-less,
// result-less direct function calls.
func (s *state) openDeferRecord(n *ir.CallExpr) {
	if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
		s.Fatalf("defer call with arguments or results: %v", n)
	}

	opendefer := &openDeferInfo{
		n: n,
	}
	fn := n.Fun

	// Evaluate the deferred function now (defer captures it at the defer
	// statement) and spill it to a dedicated stack slot so the exit code
	// can reload it.
	closureVal := s.expr(fn)
	closure := s.openDeferSave(fn.Type(), closureVal)
	opendefer.closureNode = closure.Aux.(*ir.Name)
	if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
		// Only a non-static callee needs the saved closure; a direct
		// PFUNC reference can be called by name at exit.
		opendefer.closure = closure
	}
	index := len(s.openDefers)
	s.openDefers = append(s.openDefers, opendefer)

	// Set bit `index` in deferBits, both in the SSA variable and in its
	// stack slot (the slot is what the runtime/exit code inspects after
	// a panic).
	bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
	newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
	s.vars[deferBitsVar] = newDeferBits
	s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
}
4526
4527
4528
4529
4530
4531
// openDeferSave spills val (of SSA-able, pointer-containing type t) to a
// dedicated stack slot for an open-coded defer and returns the slot's
// address. The slot's VarDef/VarLive and address computation are placed
// in the function's entry block so the slot is considered live (and its
// address available) for the whole function — a deferred call can run at
// any panic point.
func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
	if !ssa.CanSSA(t) {
		s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
	}
	if !t.HasPointers() {
		s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
	}
	pos := val.Pos
	temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
	temp.SetOpenDeferSlot(true)
	// The defer's index is recorded in FrameOffset; presumably used later
	// to order/identify open-defer slots — TODO(review): confirm.
	temp.SetFrameOffset(int64(len(s.openDefers)))
	var addrTemp *ssa.Value

	if s.curBlock.ID != s.f.Entry.ID {
		// Not currently in the entry block: emit the VarDef/VarLive and
		// the LocalAddr directly into the entry block by rewriting its
		// defvars memory, so the slot is live from function entry.
		if t.HasPointers() {
			s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		}
		s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
		addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
	} else {
		// Already in the entry block: emit through the normal current-
		// block mechanism.
		if t.HasPointers() {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
		}
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
		addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
	}

	// The slot must be zeroed at function entry: a panic may occur before
	// the store below executes, and the GC must not see junk pointers.
	temp.SetNeedzero(true)

	// Save the closure value into the slot at the defer statement.
	s.store(t, addrTemp, val)
	return addrTemp
}
4576
4577
4578
4579
4580
// openDeferExit generates SSA for processing all the open-coded defers at
// function exit. It loads deferBits and, for each defer in reverse order,
// checks whether the corresponding bit is set; if so, it clears the bit and
// makes the deferred call.
func (s *state) openDeferExit() {
	deferExit := s.f.NewBlock(ssa.BlockPlain)
	s.endBlock().AddEdgeTo(deferExit)
	s.startBlock(deferExit)
	s.lastDeferExit = deferExit
	s.lastDeferCount = len(s.openDefers)
	zeroval := s.constInt8(types.Types[types.TUINT8], 0)
	// Test for and run defers in reverse order (LIFO, matching defer semantics).
	for i := len(s.openDefers) - 1; i >= 0; i-- {
		r := s.openDefers[i]
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
		// Generate code to check if the bit associated with the current
		// defer is set.
		bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
		andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
		eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(eqVal)
		b.AddEdgeTo(bEnd)  // bit clear: skip this defer
		b.AddEdgeTo(bCond) // bit set: run this defer
		bCond.AddEdgeTo(bEnd)
		s.startBlock(bCond)

		// Clear this bit in deferBits and force the store back to the
		// stack slot, so the runtime will not re-run this defer call if
		// the call below panics.
		nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
		maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)

		// Use the masked value for subsequent tests, keeping previous bits cleared.
		s.vars[deferBitsVar] = maskedval

		// Generate code to call the deferred function, using the closure
		// that was stored in the autotmp slot at the point of the defer
		// statement (see openDeferSave).
		fn := r.n.Fun
		stksize := fn.Type().ArgWidth()
		var callArgs []*ssa.Value
		var call *ssa.Value
		if r.closure != nil {
			// Generic closure call: load the closure, nil-check if
			// required, and call through its code pointer.
			v := s.load(r.closure.Type.Elem(), r.closure)
			s.maybeNilCheckClosure(v, callDefer)
			codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
			aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
		} else {
			// Direct static call (r.n.Fun was a PFUNC name).
			aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		}
		callArgs = append(callArgs, s.mem())
		call.AddArgs(callArgs...)
		call.AuxInt = stksize
		s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)

		// Make sure the closure slot is considered live across the call
		// (it may be needed again if an earlier defer in this loop panics).
		if r.closureNode != nil {
			s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
		}

		s.endBlock()
		s.startBlock(bEnd)
	}
}
4650
// callResult generates SSA for the call n of kind k and returns the call's
// result value (or nil if the call has no results).
func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, false, nil)
}
4654
// callAddr generates SSA for the call n of kind k and returns the address of
// the call's first result rather than the result value itself.
func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
	return s.call(n, k, true, nil)
}
4658
4659
4660
// call generates SSA for the call expression n of kind k (normal, go, defer,
// defer-on-stack, or tail call). If returnResultAddr is true, it returns the
// address of the first result instead of its value. deferExtra, if non-nil,
// supplies an extra interface argument for deferprocat.
// Returns the call's first result value (or its address), or nil if the call
// has no usable result.
func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
	s.prevCall = nil
	var calleeLSym *obj.LSym // target of call if known statically
	var closure *ssa.Value   // ptr to closure to run (if dynamic)
	var codeptr *ssa.Value   // ptr to target code (if dynamic)
	var dextra *ssa.Value    // extra argument for deferprocat
	var rcvr *ssa.Value      // receiver to set
	fn := n.Fun
	var ACArgs []*types.Type    // arg types for the aux call
	var ACResults []*types.Type // result types for the aux call
	var callArgs []*ssa.Value   // SSA values of the args

	callABI := s.f.ABIDefault

	// go/defer calls must have been rewritten earlier to take no arguments
	// and produce no results; only direct function calls are allowed.
	if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
		s.Fatalf("go/defer call with arguments: %v", n)
	}

	isCallDeferRangeFunc := false

	switch n.Op() {
	case ir.OCALLFUNC:
		if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
			// Direct call to a known function.
			fn := fn.(*ir.Name)
			calleeLSym = callTargetLSym(fn)
			if buildcfg.Experiment.RegabiArgs {
				// Pick the callee's ABI. fn.Func may be nil for some
				// imported functions; in that case, stay with ABIDefault.
				if fn.Func != nil {
					callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
				}
			} else {
				// Only functions explicitly marked with the
				// //go:registerparams pragma use the register ABI.
				inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
				inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
				if inRegistersImported || inRegistersSamePackage {
					callABI = s.f.ABI1
				}
			}
			// Calls to runtime.deferrangefunc need the same exit
			// treatment as defers (see the BlockDefer code below).
			if fn := n.Fun.Sym().Name; n.Fun.Sym().Pkg == ir.Pkgs.Runtime && fn == "deferrangefunc" {
				isCallDeferRangeFunc = true
			}
			break
		}
		// Dynamic call through a closure value.
		closure = s.expr(fn)
		if k != callDefer && k != callDeferStack {
			// Deferred nil function needs to panic when the function is
			// invoked, not the point of defer statement, so skip the
			// nil check here for defers.
			s.maybeNilCheckClosure(closure, k)
		}
	case ir.OCALLINTER:
		// Interface method call: fetch the itab-based code pointer and
		// the receiver data word.
		if fn.Op() != ir.ODOTINTER {
			s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
		}
		fn := fn.(*ir.SelectorExpr)
		var iclosure *ssa.Value
		iclosure, rcvr = s.getClosureAndRcvr(fn)
		if k == callNormal {
			codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
		} else {
			closure = iclosure
		}
	}
	if deferExtra != nil {
		dextra = s.expr(deferExtra)
	}

	params := callABI.ABIAnalyze(n.Fun.Type(), false /* do not set names */)
	types.CalcSize(fn.Type())
	stksize := params.ArgWidth() // includes receiver, args, and results

	res := n.Fun.Type().Results()
	if k == callNormal || k == callTail {
		for _, p := range params.OutParams() {
			ACResults = append(ACResults, p.Type)
		}
	}

	var call *ssa.Value
	if k == callDeferStack {
		// defer via a stack-allocated _defer record.
		if stksize != 0 {
			s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
		}
		// Make a defer struct on the stack and store the closure into it.
		t := deferstruct()
		n, addr := s.temp(n.Pos(), t)
		n.SetNonMergeable(true)
		s.store(closure.Type,
			s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
			closure)

		// Call runtime.deferprocStack with pointer to _defer record.
		ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
		aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
		callArgs = append(callArgs, addr, s.mem())
		call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		call.AddArgs(callArgs...)
		call.AuxInt = int64(types.PtrSize)
	} else {
		// Store arguments to stack, including defer/go arguments and
		// receiver for method calls. These are written in SP-offset order.
		argStart := base.Ctxt.Arch.FixedFrameSize
		// Defer/go args.
		if k != callNormal && k != callTail {
			// Write closure (arg to newproc/deferproc).
			ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
			callArgs = append(callArgs, closure)
			stksize += int64(types.PtrSize)
			argStart += int64(types.PtrSize)
			if dextra != nil {
				// Extra interface argument for deferprocat.
				ACArgs = append(ACArgs, types.Types[types.TINTER])
				callArgs = append(callArgs, dextra)
				stksize += 2 * int64(types.PtrSize)
				argStart += 2 * int64(types.PtrSize)
			}
		}

		// Set receiver (for interface calls).
		if rcvr != nil {
			callArgs = append(callArgs, rcvr)
		}

		// Write args.
		t := n.Fun.Type()
		args := n.Args

		for _, p := range params.InParams() { // includes receiver for interface calls
			ACArgs = append(ACArgs, p.Type)
		}

		// Split the entry block if there are open defers, because later
		// calls to openDeferSave may cause a mismatch between the mem for
		// an OpDereference and the call site's mem.
		if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			curb := s.f.NewBlock(ssa.BlockPlain)
			b.AddEdgeTo(curb)
			s.startBlock(curb)
		}

		for i, n := range args {
			callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
		}

		callArgs = append(callArgs, s.mem())

		// call target
		switch {
		case k == callDefer:
			sym := ir.Syms.Deferproc
			if dextra != nil {
				sym = ir.Syms.Deferprocat
			}
			aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults)) // TODO(register args) may need match in typecheck?
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case k == callGo:
			aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
		case closure != nil:
			// rawLoad because loading the code pointer from a
			// closure is always safe, but IsSanitizerSafeAddr can't
			// always figure that out currently, and it's critical
			// that we not clobber any arguments already stored onto
			// the stack.
			codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
			aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
			call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
		case codeptr != nil:
			// Interface call.
			aux := ssa.InterfaceAuxCall(params)
			call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
		case calleeLSym != nil:
			aux := ssa.StaticAuxCall(calleeLSym, params)
			call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
			if k == callTail {
				call.Op = ssa.OpTailLECall
				stksize = 0 // tail call does not use stack; reuse caller's frame
			}
		default:
			s.Fatalf("bad call type %v %v", n.Op(), n)
		}
		call.AddArgs(callArgs...)
		call.AuxInt = stksize // Call operations carry the argsize of the callee along with them
	}
	s.prevCall = call
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
	// Insert VarLive opcodes.
	for _, v := range n.KeepAlive {
		if !v.Addrtaken() {
			s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
		}
		switch v.Class {
		case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
		default:
			s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
		}
		s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
	}

	// Finish block for defers.
	if k == callDefer || k == callDeferStack || isCallDeferRangeFunc {
		b := s.endBlock()
		b.Kind = ssa.BlockDefer
		b.SetControl(call)
		bNext := s.f.NewBlock(ssa.BlockPlain)
		b.AddEdgeTo(bNext)
		// Add recover edge to exit code; share a single DeferReturn
		// block per function.
		r := s.f.DeferReturn
		if r == nil {
			r = s.f.NewBlock(ssa.BlockPlain)
			s.startBlock(r)
			s.exit()
			s.f.DeferReturn = r
		}
		b.AddEdgeTo(r) // Add recover edge to exit code.
		b.Likely = ssa.BranchLikely
		s.startBlock(bNext)
	}

	if len(res) == 0 || k != callNormal {
		// call has no return value. Continue with the next statement.
		return nil
	}
	fp := res[0]
	if returnResultAddr {
		return s.resultAddrOfCall(call, 0, fp.Type)
	}
	return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
}
4894
4895
4896
4897 func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
4898 if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
4899
4900
4901 s.nilCheck(closure)
4902 }
4903 }
4904
4905
4906
// getClosureAndRcvr returns values for the appropriate closure and receiver of an
// interface call: fn.X evaluates to the interface value; the returned closure is
// a pointer into the itab's method table at the method's offset, and the
// receiver is the interface's data word.
func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
	i := s.expr(fn.X)
	itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
	// The itab must be non-nil or the method call is on a nil interface.
	s.nilCheck(itab)
	itabidx := fn.Offset() + rttype.ITab.OffsetOf("Fun")
	closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
	rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
	return closure, rcvr
}
4916
4917
4918
4919 func etypesign(e types.Kind) int8 {
4920 switch e {
4921 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
4922 return -1
4923 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
4924 return +1
4925 }
4926 return 0
4927 }
4928
4929
4930
// addr converts the address of the expression n to SSA, adds it to s and
// returns the SSA result. The value that the returned value points to is
// guaranteed to be non-nil.
func (s *state) addr(n ir.Node) *ssa.Value {
	if n.Op() != ir.ONAME {
		s.pushLine(n.Pos())
		defer s.popLine()
	}

	if s.canSSA(n) {
		s.Fatalf("addr of canSSA expression: %+v", n)
	}

	t := types.NewPtr(n.Type())
	// linksymOffset builds the address lsym+offset, materialized in the
	// entry block since it is a constant.
	linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
		v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
		// TODO: Make OpAddr use AuxInt as well as Aux.
		if offset != 0 {
			v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
		}
		return v
	}
	switch n.Op() {
	case ir.OLINKSYMOFFSET:
		no := n.(*ir.LinksymOffsetExpr)
		return linksymOffset(no.Linksym, no.Offset_)
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Heapaddr != nil {
			// Heap-escaped variable: its address lives in Heapaddr.
			return s.expr(n.Heapaddr)
		}
		switch n.Class {
		case ir.PEXTERN:
			// global variable
			return linksymOffset(n.Linksym(), 0)
		case ir.PPARAM:
			// parameter slot
			v := s.decladdrs[n]
			if v != nil {
				return v
			}
			s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
			return nil
		case ir.PAUTO:
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))

		case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
			// ensure that we reuse symbols for out parameters so
			// that cse works on their addresses
			return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
		default:
			s.Fatalf("variable address class %v not implemented", n.Class)
			return nil
		}
	case ir.ORESULT:
		// load return from callee
		n := n.(*ir.ResultExpr)
		return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
	case ir.OINDEX:
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsSlice() {
			a := s.expr(n.X)
			i := s.expr(n.Index)
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			p := s.newValue1(ssa.OpSlicePtr, t, a)
			return s.newValue2(ssa.OpPtrIndex, t, p, i)
		} else { // array
			a := s.addr(n.X)
			i := s.expr(n.Index)
			len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
			i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
			return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
		}
	case ir.ODEREF:
		n := n.(*ir.StarExpr)
		return s.exprPtr(n.X, n.Bounded(), n.Pos())
	case ir.ODOT:
		n := n.(*ir.SelectorExpr)
		p := s.addr(n.X)
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.ODOTPTR:
		n := n.(*ir.SelectorExpr)
		p := s.exprPtr(n.X, n.Bounded(), n.Pos())
		return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.Type() == n.X.Type() {
			return s.addr(n.X)
		}
		addr := s.addr(n.X)
		return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
	case ir.OCALLFUNC, ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		return s.callAddr(n, callNormal)
	case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
		// The type assertion result must come from a Load; return the
		// address it was loaded from, verifying the memory is still current.
		var v *ssa.Value
		if n.Op() == ir.ODOTTYPE {
			v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
		} else {
			v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
		}
		if v.Op != ssa.OpLoad {
			s.Fatalf("dottype of non-load")
		}
		if v.Args[1] != s.mem() {
			s.Fatalf("memory no longer live from dottype load")
		}
		return v.Args[0]
	default:
		s.Fatalf("unhandled addr %v", n.Op())
		return nil
	}
}
5042
5043
5044
// canSSA reports whether n is SSA-able.
// n must be an ONAME (or an ODOT/array-OINDEX sequence of them).
func (s *state) canSSA(n ir.Node) bool {
	if base.Flag.N != 0 {
		// Optimization disabled (-N): keep everything in memory.
		return false
	}
	// Strip field selections and array indexing to find the base name.
	for {
		nn := n
		if nn.Op() == ir.ODOT {
			nn := nn.(*ir.SelectorExpr)
			n = nn.X
			continue
		}
		if nn.Op() == ir.OINDEX {
			nn := nn.(*ir.IndexExpr)
			if nn.X.Type().IsArray() {
				n = nn.X
				continue
			}
		}
		break
	}
	if n.Op() != ir.ONAME {
		return false
	}
	return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
}
5070
5071 func (s *state) canSSAName(name *ir.Name) bool {
5072 if name.Addrtaken() || !name.OnStack() {
5073 return false
5074 }
5075 switch name.Class {
5076 case ir.PPARAMOUT:
5077 if s.hasdefer {
5078
5079
5080
5081
5082
5083 return false
5084 }
5085 if s.cgoUnsafeArgs {
5086
5087
5088 return false
5089 }
5090 }
5091 return true
5092
5093 }
5094
5095
5096 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5097 p := s.expr(n)
5098 if bounded || n.NonNil() {
5099 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5100 s.f.Warnl(lineno, "removed nil check")
5101 }
5102 return p
5103 }
5104 p = s.nilCheck(p)
5105 return p
5106 }
5107
5108
5109
5110
5111
5112
5113 func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
5114 if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
5115 return ptr
5116 }
5117 return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
5118 }
5119
5120
5121
5122
5123
5124
5125
// boundsCheck generates bounds checking code. Checks if 0 <= idx <[=] len,
// branches to exit if not. Implicit conversion of idx to a full-width int is
// done via extendIndex. If bounded is true then caller guarantees the index
// is not out of bounds (do not generate checks).
// Returns the (possibly extended and Spectre-masked) index.
func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	idx = s.extendIndex(idx, len, kind, bounded)

	if bounded || base.Flag.B != 0 {
		// If bounded or bounds checking is flag-disabled (-B), then no
		// check necessary, just return the extended index.
		//
		// NOTE(review): no Spectre masking is applied on this path —
		// presumably because a bounded index is proven in range and the
		// -B flag explicitly opts out of such protection; confirm if
		// touching this code.
		return idx
	}

	bNext := s.f.NewBlock(ssa.BlockPlain)
	bPanic := s.f.NewBlock(ssa.BlockExit)

	// For unsigned index types, switch to the unsigned variant of the
	// bounds-check kind so the panic message reports the right failure.
	if !idx.Type.IsSigned() {
		switch kind {
		case ssa.BoundsIndex:
			kind = ssa.BoundsIndexU
		case ssa.BoundsSliceAlen:
			kind = ssa.BoundsSliceAlenU
		case ssa.BoundsSliceAcap:
			kind = ssa.BoundsSliceAcapU
		case ssa.BoundsSliceB:
			kind = ssa.BoundsSliceBU
		case ssa.BoundsSlice3Alen:
			kind = ssa.BoundsSlice3AlenU
		case ssa.BoundsSlice3Acap:
			kind = ssa.BoundsSlice3AcapU
		case ssa.BoundsSlice3B:
			kind = ssa.BoundsSlice3BU
		case ssa.BoundsSlice3C:
			kind = ssa.BoundsSlice3CU
		}
	}

	// Index checks use strict <; slice bounds allow idx == len.
	var cmp *ssa.Value
	if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
		cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
	} else {
		cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
	}
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)

	s.startBlock(bPanic)
	if Arch.LinkArch.Family == sys.Wasm {
		// TODO(khr): figure out how to do "register" based calling convention for bounds checks.
		// Should be similar to gcWriteBarrier, but I can't make it work.
		s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
	} else {
		mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
		s.endBlock().SetControl(mem)
	}
	s.startBlock(bNext)

	// In Spectre index mode, apply an index mask to keep a
	// mis-speculated index in range.
	if base.Flag.Cfg.SpectreIndex {
		op := ssa.OpSpectreIndex
		if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
			op = ssa.OpSpectreSliceIndex
		}
		idx = s.newValue2(op, types.Types[types.TINT], idx, len)
	}

	return idx
}
5212
5213
// check makes sure cmp is true; if not, it calls the panic function fn.
// Panic blocks are cached per (fn, position) in s.panics so repeated checks
// at the same source position share one panic block.
func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely
	bNext := s.f.NewBlock(ssa.BlockPlain)
	line := s.peekPos()
	pos := base.Ctxt.PosTable.Pos(line)
	fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
	bPanic := s.panics[fl]
	if bPanic == nil {
		bPanic = s.f.NewBlock(ssa.BlockPlain)
		s.panics[fl] = bPanic
		s.startBlock(bPanic)
		// The panic call takes/returns memory to ensure that the right
		// memory state is observed if the panic happens.
		s.rtcall(fn, false, nil)
	}
	b.AddEdgeTo(bNext)
	b.AddEdgeTo(bPanic)
	s.startBlock(bNext)
}
5236
5237 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5238 needcheck := true
5239 switch b.Op {
5240 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5241 if b.AuxInt != 0 {
5242 needcheck = false
5243 }
5244 }
5245 if needcheck {
5246
5247 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5248 s.check(cmp, ir.Syms.Panicdivide)
5249 }
5250 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5251 }
5252
5253
5254
5255
5256
// rtcall issues a call to the given runtime function fn with the listed args.
// Returns a slice of results of the given result types.
// The call is added to the end of the current block.
// If returns is false, the block is marked as an exit block (the call does
// not return, e.g. a panic helper).
func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
	s.prevCall = nil
	// Write args to the stack offsets (off tracks the running frame offset
	// used to size the call's argument area).
	off := base.Ctxt.Arch.FixedFrameSize
	var callArgs []*ssa.Value
	var callArgTypes []*types.Type

	for _, arg := range args {
		t := arg.Type
		off = types.RoundUp(off, t.Alignment())
		size := t.Size()
		callArgs = append(callArgs, arg)
		callArgTypes = append(callArgTypes, t)
		off += size
	}
	off = types.RoundUp(off, int64(types.RegSize))

	// Issue the call as a late-expansion static call.
	var call *ssa.Value
	aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
	callArgs = append(callArgs, s.mem())
	call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
	call.AddArgs(callArgs...)
	s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)

	if !returns {
		// Finish block: mark it as an exit block; the call never returns.
		b := s.endBlock()
		b.Kind = ssa.BlockExit
		b.SetControl(call)
		call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
		if len(results) > 0 {
			s.Fatalf("panic call can't have results")
		}
		return nil
	}

	// Load results, accumulating their frame size into off.
	res := make([]*ssa.Value, len(results))
	for i, t := range results {
		off = types.RoundUp(off, t.Alignment())
		res[i] = s.resultOfCall(call, int64(i), t)
		off += t.Size()
	}
	off = types.RoundUp(off, int64(types.PtrSize))

	// Remember how much callee stack space we needed.
	call.AuxInt = off

	return res
}
5308
5309
// storeType does *left = right, where right has type t.
// skip indicates parts of the store (pointers, length, capacity) that the
// caller has already handled and should be omitted.
// leftIsStmt marks the store as the first write of a statement for debugging.
func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
	s.instrument(t, left, instrumentWrite)

	if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
		// Known to not have write barrier. Store the whole type.
		s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
		return
	}

	// Store everything except the pointer fields first, then store the
	// pointer fields. The pointer stores are the ones that may need write
	// barriers; splitting them out lets the write-barrier pass handle
	// exactly the pointer-carrying stores.
	// NOTE(review): leftIsStmt is not propagated on this path — the split
	// stores are emitted without the statement mark; confirm intended.
	s.storeTypeScalars(t, left, right, skip)
	if skip&skipPtr == 0 && t.HasPointers() {
		s.storeTypePtrs(t, left, right)
	}
}
5329
5330
// storeTypeScalars does *left = right for all scalar (non-pointer) parts of t.
// Pointer parts are stored separately by storeTypePtrs.
func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
	switch {
	case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
		s.store(t, left, right)
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			// Pointers to not-in-heap memory never need a write
			// barrier, so store them here as a scalar.
			s.store(t, left, right)
		}
		// otherwise, no scalar fields.
	case t.IsString():
		if skip&skipLen != 0 {
			return
		}
		// Store only the length; the data pointer is a pointer field.
		len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
		lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
		s.store(types.Types[types.TINT], lenAddr, len)
	case t.IsSlice():
		// Store length and capacity; the data pointer is a pointer field.
		if skip&skipLen == 0 {
			len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
			lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
			s.store(types.Types[types.TINT], lenAddr, len)
		}
		if skip&skipCap == 0 {
			cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
			capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
			s.store(types.Types[types.TINT], capAddr, cap)
		}
	case t.IsInterface():
		// itab field doesn't need a write barrier (even though it is a pointer).
		itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
		s.store(types.Types[types.TUINTPTR], left, itab)
	case t.IsStruct():
		// Recurse field by field.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypeScalars(ft, addr, val, 0)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5378
5379
// storeTypePtrs does *left = right for all pointer parts of t.
// These are the stores that may require write barriers; scalar parts are
// handled by storeTypeScalars.
func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
	switch {
	case t.IsPtrShaped():
		if t.IsPtr() && t.Elem().NotInHeap() {
			break // see issue 42032 — not-in-heap pointers are stored as scalars
		}
		s.store(t, left, right)
	case t.IsString():
		ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
		s.store(s.f.Config.Types.BytePtr, left, ptr)
	case t.IsSlice():
		elType := types.NewPtr(t.Elem())
		ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
		s.store(elType, left, ptr)
	case t.IsInterface():
		// itab field is treated as a scalar; only the data word is
		// stored here.
		idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
		idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
		s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
	case t.IsStruct():
		// Recurse into pointer-carrying fields only.
		n := t.NumFields()
		for i := 0; i < n; i++ {
			ft := t.FieldType(i)
			if !ft.HasPointers() {
				continue
			}
			addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
			val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
			s.storeTypePtrs(ft, addr, val)
		}
	case t.IsArray() && t.NumElem() == 0:
		// nothing
	case t.IsArray() && t.NumElem() == 1:
		s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
	default:
		s.Fatalf("bad write barrier type %v", t)
	}
}
5418
5419
5420 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
5421 var a *ssa.Value
5422 if !ssa.CanSSA(t) {
5423 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
5424 } else {
5425 a = s.expr(n)
5426 }
5427 return a
5428 }
5429
5430 func (s *state) storeArgWithBase(n ir.Node, t *types.Type, base *ssa.Value, off int64) {
5431 pt := types.NewPtr(t)
5432 var addr *ssa.Value
5433 if base == s.sp {
5434
5435 addr = s.constOffPtrSP(pt, off)
5436 } else {
5437 addr = s.newValue1I(ssa.OpOffPtr, pt, off, base)
5438 }
5439
5440 if !ssa.CanSSA(t) {
5441 a := s.addr(n)
5442 s.move(t, addr, a)
5443 return
5444 }
5445
5446 a := s.expr(n)
5447 s.storeType(t, addr, a, 0, false)
5448 }
5449
5450
5451
5452
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of result.
// i,j,k may be nil, in which case they are set to their default value.
// v may be a slice, string or pointer to an array.
func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
	t := v.Type
	var ptr, len, cap *ssa.Value
	switch {
	case t.IsSlice():
		ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
		len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
		cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
	case t.IsString():
		ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
		len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
		cap = len
	case t.IsPtr():
		if !t.Elem().IsArray() {
			s.Fatalf("bad ptr to array in slice %v\n", t)
		}
		nv := s.nilCheck(v)
		ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
		len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
		cap = len
	default:
		s.Fatalf("bad type in slice %v\n", t)
	}

	// Set default values
	if i == nil {
		i = s.constInt(types.Types[types.TINT], 0)
	}
	if j == nil {
		j = len
	}
	three := true
	if k == nil {
		three = false
		k = cap
	}

	// Panic if slice indices are not in bounds.
	// Make sure x is nonnegative and not larger than the next value in
	// the chain (checked innermost-out: k <= cap, j <= k, i <= j).
	if three {
		if k != cap {
			kind := ssa.BoundsSlice3Alen
			if t.IsSlice() {
				kind = ssa.BoundsSlice3Acap
			}
			k = s.boundsCheck(k, cap, kind, bounded)
		}
		if j != k {
			j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
	} else {
		if j != k {
			kind := ssa.BoundsSliceAlen
			if t.IsSlice() {
				kind = ssa.BoundsSliceAcap
			}
			j = s.boundsCheck(j, k, kind, bounded)
		}
		i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
	}

	// Word-sized integer operations.
	subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
	mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
	andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])

	// Calculate the length (rlen) and capacity (rcap) of the new slice:
	//   rlen = j - i
	//   rcap = k - i (but strings have no capacity)
	rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
	rcap := rlen
	if j != k && !t.IsString() {
		rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
	}

	if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
		// No pointer arithmetic necessary.
		return ptr, rlen, rcap
	}

	// Calculate the base pointer (rptr) for the new slice:
	//   rptr = ptr + i * elemsize
	// but we must mask the delta by Slicemask(rcap) so that when the
	// resulting capacity is zero the pointer is not advanced, keeping it
	// from pointing past the end of the original allocation (which would
	// incorrectly keep the next object alive / confuse the GC).
	stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())

	// delta = i * elemsize
	delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)

	// Apply the capacity mask: Slicemask yields all-ones if rcap > 0,
	// all-zeros if rcap == 0.
	mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
	delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)

	// rptr = ptr + delta
	rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)

	return rptr, rlen, rcap
}
5565
// u642fcvtTab holds the ops needed to convert a uint64 to a float of a given
// width; uint64Tofloat selects the right table for float32 vs float64.
type u642fcvtTab struct {
	leq, cvt2F, and, rsh, or, add ssa.Op
	one                           func(*state, *types.Type, int64) *ssa.Value
}

// u64_f64 converts uint64 -> float64.
var u64_f64 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to64F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd64F,
	one:   (*state).constInt64,
}

// u64_f32 converts uint64 -> float32.
var u64_f32 = u642fcvtTab{
	leq:   ssa.OpLeq64,
	cvt2F: ssa.OpCvt64to32F,
	and:   ssa.OpAnd64,
	rsh:   ssa.OpRsh64Ux64,
	or:    ssa.OpOr64,
	add:   ssa.OpAdd32F,
	one:   (*state).constInt64,
}

func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
}

func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
}
5598
// uint64Tofloat converts the uint64 value x (of IR type ft) to the float type
// tt, without a hardware unsigned-convert instruction. The generated code is
// equivalent to:
//
//	if x >= 0 (as a signed int64) {
//		result = floatY(x)
//	} else {
//		// x is too large for a direct signed conversion.
//		// Halve it (rounding to odd to preserve the sticky bit),
//		// convert, then double the float result.
//		y = uintX(x) & 1
//		z = uintX(x) >> 1
//		z = z | y
//		result = floatY(z)
//		result = result + result
//	}
//
// n is the IR node used as the phi variable key; its type gives the result type.
func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// cmp: 0 <= x when interpreted as signed, i.e. the high bit is clear.
	cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: direct signed conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: halve with round-to-odd, convert, double.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	one := cvttab.one(s, ft, 1)
	y := s.newValue2(cvttab.and, ft, x, one)
	z := s.newValue2(cvttab.rsh, ft, x, one)
	z = s.newValue2(cvttab.or, ft, z, y)
	a := s.newValue1(cvttab.cvt2F, tt, z)
	a1 := s.newValue2(cvttab.add, tt, a, a)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5656
// u322fcvtTab holds the ops needed to convert a uint32 to a float of a given
// width; uint32Tofloat selects the right table for float32 vs float64.
type u322fcvtTab struct {
	cvtI2F, cvtF2F ssa.Op
}

// u32_f64 converts uint32 -> float64 (every uint32 fits exactly, so the
// final float-to-float conversion is a no-op Copy).
var u32_f64 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to64F,
	cvtF2F: ssa.OpCopy,
}

// u32_f32 converts uint32 -> float32 via an intermediate float64.
var u32_f32 = u322fcvtTab{
	cvtI2F: ssa.OpCvt32to32F,
	cvtF2F: ssa.OpCvt64Fto32F,
}

func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
}

func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
}
5678
// uint32Tofloat converts the uint32 value x (of IR type ft) to the float type
// tt, without a hardware unsigned-convert instruction. The generated code is
// equivalent to:
//
//	if x >= 0 (as a signed int32) {
//		result = floatY(x)
//	} else {
//		result = floatY(float64(x) + (1<<32))
//	}
func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	// cmp: 0 <= x when interpreted as signed int32.
	cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// Fast path: direct signed conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvtI2F, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Slow path: the signed conversion produced x - 2^32; add 2^32 back
	// in float64 (exact) and narrow to the target type if needed.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
	twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
	a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
	a3 := s.newValue1(cvttab.cvtF2F, tt, a2)

	s.vars[n] = a3
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5716
5717
// referenceTypeBuiltin generates code for the len/cap builtins for maps and channels.
// x is the evaluated map or channel header pointer. The generated code is
// equivalent to:
//
//	if x == nil {
//		result = 0
//	} else {
//		result = load from the header (len word, or cap word for chan cap)
//	}
func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
	if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
		s.Fatalf("node must be a map or a channel")
	}
	// The combinations below are handled elsewhere (runtime calls), not here.
	if n.X.Type().IsChan() && n.Op() == ir.OLEN {
		s.Fatalf("cannot inline len(chan)") // must use runtime.chanlen now
	}
	if n.X.Type().IsChan() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(chan)") // must use runtime.chancap now
	}
	if n.X.Type().IsMap() && n.Op() == ir.OCAP {
		s.Fatalf("cannot inline cap(map)") // cap(map) is not supported
	}
	lenType := n.Type()
	nilValue := s.constNil(types.Types[types.TUINTPTR])
	cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchUnlikely

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// length/capacity of a nil map/chan is zero
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	s.vars[n] = s.zeroVal(lenType)
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	switch n.Op() {
	case ir.OLEN:
		if buildcfg.Experiment.SwissMap && n.X.Type().IsMap() {
			// Swiss maps: load the first field of the map header
			// and convert it to the result type.
			loadType := reflectdata.SwissMapType().Field(0).Type
			load := s.load(loadType, x)
			s.vars[n] = s.conv(nil, load, loadType, lenType)
		} else {
			// length is stored in the first word of the header.
			s.vars[n] = s.load(lenType, x)
		}
	case ir.OCAP:
		// capacity is stored in the second word of the header.
		sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
		s.vars[n] = s.load(lenType, sw)
	default:
		s.Fatalf("op must be OLEN or OCAP")
	}
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	s.startBlock(bAfter)
	return s.variable(n, lenType)
}
5784
// f2uCvtTab holds the ops and constants needed to lower one
// float-to-unsigned-integer conversion (one source/destination size pair);
// see floatToUint for how the fields are used.
type f2uCvtTab struct {
	// ltf compares two source floats; cvt2U converts float to (signed-capable) int;
	// subf subtracts source floats; or is the destination-width bitwise or.
	ltf, cvt2U, subf, or ssa.Op
	// floatValue builds a constant of the source float type.
	floatValue func(*state, *types.Type, float64) *ssa.Value
	// intValue builds a constant of the destination integer type.
	intValue func(*state, *types.Type, int64) *ssa.Value
	// cutoff is 1<<(destination bits - 1), the smallest value whose
	// conversion needs the subtract-then-or fixup.
	cutoff uint64
}
5791
// f32_u64 lowers float32 -> uint64 conversions.
var f32_u64 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto64,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat32,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f64_u64 lowers float64 -> uint64 conversions.
var f64_u64 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto64,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr64,
	floatValue: (*state).constFloat64,
	intValue:   (*state).constInt64,
	cutoff:     1 << 63,
}

// f32_u32 lowers float32 -> uint32 conversions.
var f32_u32 = f2uCvtTab{
	ltf:        ssa.OpLess32F,
	cvt2U:      ssa.OpCvt32Fto32,
	subf:       ssa.OpSub32F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat32,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}

// f64_u32 lowers float64 -> uint32 conversions.
var f64_u32 = f2uCvtTab{
	ltf:        ssa.OpLess64F,
	cvt2U:      ssa.OpCvt64Fto32,
	subf:       ssa.OpSub64F,
	or:         ssa.OpOr32,
	floatValue: (*state).constFloat64,
	intValue:   func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
	cutoff:     1 << 31,
}
5831
// float32ToUint64 generates code to convert float32 value x to uint64.
func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u64, n, x, ft, tt)
}

// float64ToUint64 generates code to convert float64 value x to uint64.
func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u64, n, x, ft, tt)
}

// float32ToUint32 generates code to convert float32 value x to uint32.
func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f32_u32, n, x, ft, tt)
}

// float64ToUint32 generates code to convert float64 value x to uint32.
func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	return s.floatToUint(&f64_u32, n, x, ft, tt)
}
5846
// floatToUint generates code to convert a float value x of type ft to an
// unsigned integer of type tt, for targets whose conversion instruction only
// handles the signed range. Values below the cutoff convert directly; larger
// values are rebased by the cutoff and the top bit is or'ed back in:
//
//	cutoff := 1 << (intY_Size - 1)
//	if x < floatX(cutoff) {
//		result = uintY(x)
//	} else {
//		y = x - floatX(cutoff)
//		z = uintY(y)
//		result = z | -(cutoff)
//	}
func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
	cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
	cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
	b := s.endBlock()
	b.Kind = ssa.BlockIf
	b.SetControl(cmp)
	b.Likely = ssa.BranchLikely // most values are below the cutoff

	bThen := s.f.NewBlock(ssa.BlockPlain)
	bElse := s.f.NewBlock(ssa.BlockPlain)
	bAfter := s.f.NewBlock(ssa.BlockPlain)

	// In-range: direct conversion.
	b.AddEdgeTo(bThen)
	s.startBlock(bThen)
	a0 := s.newValue1(cvttab.cvt2U, tt, x)
	s.vars[n] = a0
	s.endBlock()
	bThen.AddEdgeTo(bAfter)

	// Out-of-range: subtract cutoff, convert, then or the high bit back in.
	b.AddEdgeTo(bElse)
	s.startBlock(bElse)
	y := s.newValue2(cvttab.subf, ft, x, cutoff)
	y = s.newValue1(cvttab.cvt2U, tt, y)
	z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
	a1 := s.newValue2(cvttab.or, tt, y, z)
	s.vars[n] = a1
	s.endBlock()
	bElse.AddEdgeTo(bAfter)

	// Merge; s.variable inserts the phi.
	s.startBlock(bAfter)
	return s.variable(n, n.Type())
}
5887
5888
5889
5890
// dottype generates SSA for a type assertion node.
// commaok indicates whether to panic or return a bool.
// If commaok is false, resok will be nil.
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)              // input interface
	target := s.reflectType(n.Type()) // target type descriptor
	var targetItab *ssa.Value
	if n.ITab != nil {
		targetItab = s.expr(n.ITab)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
}
5900
// dynamicDottype generates SSA for a dynamic type assertion node, where the
// target type is only known at run time (e.g. obtained from a dictionary).
// commaok indicates whether to panic or return a bool.
func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
	iface := s.expr(n.X)
	var source, target, targetItab *ssa.Value
	if n.SrcRType != nil {
		source = s.expr(n.SrcRType)
	}
	if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
		byteptr := s.f.Config.Types.BytePtr
		targetItab = s.expr(n.ITab)
		// Derive the target *runtime._type from the itab's Type field
		// rather than evaluating n.RType separately.
		target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), targetItab))
	} else {
		target = s.expr(n.RType)
	}
	return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
}
5918
5919
5920
5921
5922
5923
5924
5925
5926
5927 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
5928 typs := s.f.Config.Types
5929 byteptr := typs.BytePtr
5930 if dst.IsInterface() {
5931 if dst.IsEmptyInterface() {
5932
5933
5934 if base.Debug.TypeAssert > 0 {
5935 base.WarnfAt(pos, "type assertion inlined")
5936 }
5937
5938
5939 itab := s.newValue1(ssa.OpITab, byteptr, iface)
5940
5941 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
5942
5943 if src.IsEmptyInterface() && commaok {
5944
5945 return iface, cond
5946 }
5947
5948
5949 b := s.endBlock()
5950 b.Kind = ssa.BlockIf
5951 b.SetControl(cond)
5952 b.Likely = ssa.BranchLikely
5953 bOk := s.f.NewBlock(ssa.BlockPlain)
5954 bFail := s.f.NewBlock(ssa.BlockPlain)
5955 b.AddEdgeTo(bOk)
5956 b.AddEdgeTo(bFail)
5957
5958 if !commaok {
5959
5960 s.startBlock(bFail)
5961 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
5962
5963
5964 s.startBlock(bOk)
5965 if src.IsEmptyInterface() {
5966 res = iface
5967 return
5968 }
5969
5970 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5971 typ := s.load(byteptr, off)
5972 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5973 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
5974 return
5975 }
5976
5977 s.startBlock(bOk)
5978
5979
5980 off := s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab)
5981 s.vars[typVar] = s.load(byteptr, off)
5982 s.endBlock()
5983
5984
5985 s.startBlock(bFail)
5986 s.vars[typVar] = itab
5987 s.endBlock()
5988
5989
5990 bEnd := s.f.NewBlock(ssa.BlockPlain)
5991 bOk.AddEdgeTo(bEnd)
5992 bFail.AddEdgeTo(bEnd)
5993 s.startBlock(bEnd)
5994 idata := s.newValue1(ssa.OpIData, byteptr, iface)
5995 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
5996 resok = cond
5997 delete(s.vars, typVar)
5998 return
5999 }
6000
6001 if base.Debug.TypeAssert > 0 {
6002 base.WarnfAt(pos, "type assertion not inlined")
6003 }
6004
6005 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6006 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6007
6008
6009 bNil := s.f.NewBlock(ssa.BlockPlain)
6010 bNonNil := s.f.NewBlock(ssa.BlockPlain)
6011 bMerge := s.f.NewBlock(ssa.BlockPlain)
6012 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6013 b := s.endBlock()
6014 b.Kind = ssa.BlockIf
6015 b.SetControl(cond)
6016 b.Likely = ssa.BranchLikely
6017 b.AddEdgeTo(bNonNil)
6018 b.AddEdgeTo(bNil)
6019
6020 s.startBlock(bNil)
6021 if commaok {
6022 s.vars[typVar] = itab
6023 b := s.endBlock()
6024 b.AddEdgeTo(bMerge)
6025 } else {
6026
6027 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6028 }
6029
6030
6031 s.startBlock(bNonNil)
6032 typ := itab
6033 if !src.IsEmptyInterface() {
6034 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, rttype.ITab.OffsetOf("Type"), itab))
6035 }
6036
6037
6038 var d *ssa.Value
6039 if descriptor != nil {
6040 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
6041 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Family) {
6042
6043
6044 if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
6045 s.Fatalf("atomic load not available")
6046 }
6047
6048 var mul, and, add, zext ssa.Op
6049 if s.config.PtrSize == 4 {
6050 mul = ssa.OpMul32
6051 and = ssa.OpAnd32
6052 add = ssa.OpAdd32
6053 zext = ssa.OpCopy
6054 } else {
6055 mul = ssa.OpMul64
6056 and = ssa.OpAnd64
6057 add = ssa.OpAdd64
6058 zext = ssa.OpZeroExt32to64
6059 }
6060
6061 loopHead := s.f.NewBlock(ssa.BlockPlain)
6062 loopBody := s.f.NewBlock(ssa.BlockPlain)
6063 cacheHit := s.f.NewBlock(ssa.BlockPlain)
6064 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
6065
6066
6067
6068 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6069 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6070 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6071
6072
6073 var hash *ssa.Value
6074 if src.IsEmptyInterface() {
6075 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.Type.OffsetOf("Hash"), typ), s.mem())
6076 } else {
6077 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, rttype.ITab.OffsetOf("Hash"), itab), s.mem())
6078 }
6079 hash = s.newValue1(zext, typs.Uintptr, hash)
6080 s.vars[hashVar] = hash
6081
6082 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6083
6084 b := s.endBlock()
6085 b.AddEdgeTo(loopHead)
6086
6087
6088
6089 s.startBlock(loopHead)
6090 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6091 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6092 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6093 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6094
6095 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6096
6097
6098
6099 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6100 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6101 b = s.endBlock()
6102 b.Kind = ssa.BlockIf
6103 b.SetControl(cmp1)
6104 b.AddEdgeTo(cacheHit)
6105 b.AddEdgeTo(loopBody)
6106
6107
6108
6109 s.startBlock(loopBody)
6110 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6111 b = s.endBlock()
6112 b.Kind = ssa.BlockIf
6113 b.SetControl(cmp2)
6114 b.AddEdgeTo(cacheMiss)
6115 b.AddEdgeTo(loopHead)
6116
6117
6118
6119 s.startBlock(cacheHit)
6120 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6121 s.vars[typVar] = eItab
6122 b = s.endBlock()
6123 b.AddEdgeTo(bMerge)
6124
6125
6126 s.startBlock(cacheMiss)
6127 }
6128 }
6129
6130
6131 if descriptor != nil {
6132 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6133 } else {
6134 var fn *obj.LSym
6135 if commaok {
6136 fn = ir.Syms.AssertE2I2
6137 } else {
6138 fn = ir.Syms.AssertE2I
6139 }
6140 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6141 }
6142 s.vars[typVar] = itab
6143 b = s.endBlock()
6144 b.AddEdgeTo(bMerge)
6145
6146
6147 s.startBlock(bMerge)
6148 itab = s.variable(typVar, byteptr)
6149 var ok *ssa.Value
6150 if commaok {
6151 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6152 }
6153 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6154 }
6155
6156 if base.Debug.TypeAssert > 0 {
6157 base.WarnfAt(pos, "type assertion inlined")
6158 }
6159
6160
6161 direct := types.IsDirectIface(dst)
6162 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6163 if base.Debug.TypeAssert > 0 {
6164 base.WarnfAt(pos, "type assertion inlined")
6165 }
6166 var wantedFirstWord *ssa.Value
6167 if src.IsEmptyInterface() {
6168
6169 wantedFirstWord = target
6170 } else {
6171
6172 wantedFirstWord = targetItab
6173 }
6174
6175 var tmp ir.Node
6176 var addr *ssa.Value
6177 if commaok && !ssa.CanSSA(dst) {
6178
6179
6180 tmp, addr = s.temp(pos, dst)
6181 }
6182
6183 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6184 b := s.endBlock()
6185 b.Kind = ssa.BlockIf
6186 b.SetControl(cond)
6187 b.Likely = ssa.BranchLikely
6188
6189 bOk := s.f.NewBlock(ssa.BlockPlain)
6190 bFail := s.f.NewBlock(ssa.BlockPlain)
6191 b.AddEdgeTo(bOk)
6192 b.AddEdgeTo(bFail)
6193
6194 if !commaok {
6195
6196 s.startBlock(bFail)
6197 taddr := source
6198 if taddr == nil {
6199 taddr = s.reflectType(src)
6200 }
6201 if src.IsEmptyInterface() {
6202 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6203 } else {
6204 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6205 }
6206
6207
6208 s.startBlock(bOk)
6209 if direct {
6210 return s.newValue1(ssa.OpIData, dst, iface), nil
6211 }
6212 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6213 return s.load(dst, p), nil
6214 }
6215
6216
6217
6218 bEnd := s.f.NewBlock(ssa.BlockPlain)
6219
6220
6221 valVar := ssaMarker("val")
6222
6223
6224 s.startBlock(bOk)
6225 if tmp == nil {
6226 if direct {
6227 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6228 } else {
6229 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6230 s.vars[valVar] = s.load(dst, p)
6231 }
6232 } else {
6233 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6234 s.move(dst, addr, p)
6235 }
6236 s.vars[okVar] = s.constBool(true)
6237 s.endBlock()
6238 bOk.AddEdgeTo(bEnd)
6239
6240
6241 s.startBlock(bFail)
6242 if tmp == nil {
6243 s.vars[valVar] = s.zeroVal(dst)
6244 } else {
6245 s.zero(dst, addr)
6246 }
6247 s.vars[okVar] = s.constBool(false)
6248 s.endBlock()
6249 bFail.AddEdgeTo(bEnd)
6250
6251
6252 s.startBlock(bEnd)
6253 if tmp == nil {
6254 res = s.variable(valVar, dst)
6255 delete(s.vars, valVar)
6256 } else {
6257 res = s.load(dst, addr)
6258 }
6259 resok = s.variable(okVar, types.Types[types.TBOOL])
6260 delete(s.vars, okVar)
6261 return res, resok
6262 }
6263
6264
// temp allocates a temp of type t at position pos and returns it together
// with the SSA value holding its address.
func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
	tmp := typecheck.TempAt(pos, s.curfn, t)
	// Mark the temp live with a VarDef when liveness/merging needs to see it.
	if t.HasPointers() || (ssa.IsMergeCandidate(tmp) && t != deferstruct()) {
		s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
	}
	addr := s.addr(tmp)
	return tmp, addr
}
6273
6274
6275 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
6276 v := s.vars[n]
6277 if v != nil {
6278 return v
6279 }
6280 v = s.fwdVars[n]
6281 if v != nil {
6282 return v
6283 }
6284
6285 if s.curBlock == s.f.Entry {
6286
6287 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6288 }
6289
6290
6291 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6292 s.fwdVars[n] = v
6293 if n.Op() == ir.ONAME {
6294 s.addNamedValue(n.(*ir.Name), v)
6295 }
6296 return v
6297 }
6298
// mem returns the current memory state, as an SSA value.
func (s *state) mem() *ssa.Value {
	return s.variable(memVar, types.TypeMem)
}
6302
6303 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6304 if n.Class == ir.Pxxx {
6305
6306 return
6307 }
6308 if ir.IsAutoTmp(n) {
6309
6310 return
6311 }
6312 if n.Class == ir.PPARAMOUT {
6313
6314
6315 return
6316 }
6317 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6318 values, ok := s.f.NamedValues[loc]
6319 if !ok {
6320 s.f.Names = append(s.f.Names, &loc)
6321 s.f.CanonicalLocalSlots[loc] = &loc
6322 }
6323 s.f.NamedValues[loc] = append(values, v)
6324 }
6325
6326
// Branch is an unresolved branch: a branch instruction whose target block's
// final PC is not yet known.
type Branch struct {
	P *obj.Prog  // branch instruction
	B *ssa.Block // target
}
6331
6332
// State contains state needed during Prog generation.
type State struct {
	ABI obj.ABI

	pp *objw.Progs

	// Branches remembers all the branch instructions we've seen
	// and where they would like to go.
	Branches []Branch

	// JumpTables remembers all the jump tables we've seen.
	JumpTables []*ssa.Block

	// bstart remembers where each block starts (indexed by block ID)
	bstart []*obj.Prog

	maxarg int64 // largest frame size for arguments to calls made by the function

	// Map from GC safe points to liveness index, generated by
	// liveness analysis.
	livenessMap liveness.Map

	// partLiveArgs includes arguments that may be partially live, for which we
	// need to generate instructions that spill the argument registers.
	partLiveArgs map[*ir.Name]bool

	// lineRunStart records the beginning of the current run of instructions
	// within a single block sharing the same line number.
	// Used to move statement marks to the beginning of such runs.
	lineRunStart *obj.Prog

	// wasm: The number of values on the WebAssembly stack. This is only used as a safeguard.
	OnWasmStackSkipped int
}
6366
// FuncInfo returns the FuncInfo for the function being compiled.
func (s *State) FuncInfo() *obj.FuncInfo {
	return s.pp.CurFunc.LSym.Func()
}
6370
6371
// Prog appends a new Prog with opcode as, also maintaining the
// statement-mark bookkeeping for same-line runs of instructions.
func (s *State) Prog(as obj.As) *obj.Prog {
	p := s.pp.Prog(as)
	if objw.LosesStmtMark(as) {
		return p
	}
	// Float a statement start to the beginning of any same-line run.
	// lineRunStart is reset at block boundaries, which appears to work well.
	if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
		s.lineRunStart = p
	} else if p.Pos.IsStmt() == src.PosIsStmt {
		s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
		p.Pos = p.Pos.WithNotStmt()
	}
	return p
}
6387
6388
// Pc returns the current Prog (the next instruction to be emitted).
func (s *State) Pc() *obj.Prog {
	return s.pp.Next
}
6392
6393
// SetPos sets the current source position.
func (s *State) SetPos(pos src.XPos) {
	s.pp.Pos = pos
}
6397
6398
6399
6400
// Br emits a single branch instruction and returns the instruction.
// Not all architectures need the returned prog, but recording the
// unresolved Branch here is boilerplate common to all of them.
func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
	p := s.Prog(op)
	p.To.Type = obj.TYPE_BRANCH
	// Target PC is not known yet; fixed up after all blocks are emitted.
	s.Branches = append(s.Branches, Branch{P: p, B: target})
	return p
}
6407
6408
6409
6410
6411
6412
// DebugFriendlySetPosFrom adjusts the current position using v's position,
// subject to heuristics that reduce "jumpy" line-number churn when debugging.
// Spill/fill/copy instructions from the register allocator, phi functions,
// and instructions with no position are treated as non-statements.
func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
	switch v.Op {
	case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
		// These are not statements.
		s.SetPos(v.Pos.WithNotStmt())
	default:
		p := v.Pos
		if p != src.NoXPos {
			// If the position is defined, update the position.
			// Also convert default IsStmt to NotStmt; only
			// explicit statement boundaries should appear
			// in the generated code.
			if p.IsStmt() != src.PosIsStmt {
				if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
					// If s.pp.Pos already is a statement for this
					// file/line, keep it — replacing it with a
					// NotStmt copy of the same line would lose the
					// statement mark for the run of instructions.
					//
					// Leaving the mark where it is means the earliest
					// instruction on this line carries the statement
					// boundary, which is what the debugger wants.
					return
				}
				p = p.WithNotStmt()
				// Calls use the pos attached to v, but copy the statement mark from State.
			}
			s.SetPos(p)
		} else {
			// No position on v: inherit the current line, unmarked.
			s.SetPos(s.pp.Pos.WithNotStmt())
		}
	}
}
6451
6452
// emitArgInfo emits the argument info (traceback support) symbol for f, if needed.
func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
	ft := e.curfn.Type()
	if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
		// Nothing to print in tracebacks; skip the symbol entirely.
		return
	}

	x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
	x.Set(obj.AttrContentAddressable, true)
	e.curfn.LSym.Func().ArgInfo = x

	// Emit a funcdata pointing at the arg info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6470
6471
// EmitArgInfo generates the argument-info metadata symbol for f and returns it.
// The data is a byte stream of (offset, size) pairs and structural markers
// (start/end aggregate, "...", end-of-sequence) consumed by the runtime when
// printing function arguments in tracebacks.
func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
	x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))
	// NOTE(review): ContentAddressable is deliberately set by the caller
	// (emitArgInfo), not here.

	PtrSize := int64(types.PtrSize)
	uintptrTyp := types.Types[types.TUINTPTR]

	// isAggregate reports whether t is printed as a multi-part aggregate.
	isAggregate := func(t *types.Type) bool {
		return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
	}

	wOff := 0 // write offset into x's data
	n := 0    // number of components written so far
	writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }

	// Write one non-aggregate arg/field/element as an (offset, size) pair,
	// or the "offset too large" marker when it doesn't fit in a byte.
	write1 := func(sz, offset int64) {
		if offset >= rtabi.TraceArgsSpecial {
			writebyte(rtabi.TraceArgsOffsetTooLarge)
		} else {
			writebyte(uint8(offset))
			writebyte(uint8(sz))
		}
		n++
	}

	// visitType writes t (recursively for aggregates) starting at baseOffset.
	// It returns whether to continue visiting further arguments; it stops
	// (writing "...") once the component limit is reached.
	var visitType func(baseOffset int64, t *types.Type, depth int) bool
	visitType = func(baseOffset int64, t *types.Type, depth int) bool {
		if n >= rtabi.TraceArgsLimit {
			writebyte(rtabi.TraceArgsDotdotdot)
			return false
		}
		if !isAggregate(t) {
			write1(t.Size(), baseOffset)
			return true
		}
		writebyte(rtabi.TraceArgsStartAgg)
		depth++
		if depth >= rtabi.TraceArgsMaxDepth {
			// Too deeply nested: elide the contents.
			writebyte(rtabi.TraceArgsDotdotdot)
			writebyte(rtabi.TraceArgsEndAgg)
			n++
			return true
		}
		switch {
		case t.IsInterface(), t.IsString():
			// Two pointer-sized words.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth)
		case t.IsSlice():
			// Pointer, length, capacity.
			_ = visitType(baseOffset, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
				visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
		case t.IsComplex():
			// Real and imaginary parts.
			_ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
				visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
		case t.IsArray():
			if t.NumElem() == 0 {
				n++ // zero-length array still counts as a component
				break
			}
			for i := int64(0); i < t.NumElem(); i++ {
				if !visitType(baseOffset, t.Elem(), depth) {
					break
				}
				baseOffset += t.Elem().Size()
			}
		case t.IsStruct():
			if t.NumFields() == 0 {
				n++ // empty struct still counts as a component
				break
			}
			for _, field := range t.Fields() {
				if !visitType(baseOffset+field.Offset, field.Type, depth) {
					break
				}
			}
		}
		writebyte(rtabi.TraceArgsEndAgg)
		return true
	}

	start := 0
	if strings.Contains(f.LSym.Name, "[") {
		// Skip the dictionary argument of an instantiated generic function -
		// it is implicit and the user doesn't need to see it.
		start = 1
	}

	for _, a := range abiInfo.InParams()[start:] {
		if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
			break
		}
	}
	writebyte(rtabi.TraceArgsEndSeq)
	if wOff > rtabi.TraceArgsMaxLen {
		base.Fatalf("ArgInfo too large")
	}

	return x
}
6575
6576
// emitWrappedFuncInfo emits, for a wrapper function, a symbol recording which
// function it wraps (used by traceback to elide the wrapper).
func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
	if base.Ctxt.Flag_linkshared {
		// Relative reference (SymPtrOff) to another shared object doesn't work.
		// Unfortunate.
		return
	}

	wfn := e.curfn.WrappedFunc
	if wfn == nil {
		// Not a wrapper; nothing to emit.
		return
	}

	wsym := wfn.Linksym()
	x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
		objw.SymPtrOff(x, 0, wsym)
		x.Set(obj.AttrContentAddressable, true)
	})
	e.curfn.LSym.Func().WrapInfo = x

	// Emit a funcdata pointing at the wrap info data.
	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = x
}
6603
6604
6605 func genssa(f *ssa.Func, pp *objw.Progs) {
6606 var s State
6607 s.ABI = f.OwnAux.Fn.ABI()
6608
6609 e := f.Frontend().(*ssafn)
6610
6611 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
6612
6613 var lv *liveness.Liveness
6614 s.livenessMap, s.partLiveArgs, lv = liveness.Compute(e.curfn, f, e.stkptrsize, pp, gatherPrintInfo)
6615 emitArgInfo(e, f, pp)
6616 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
6617
6618 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
6619 if openDeferInfo != nil {
6620
6621
6622 p := pp.Prog(obj.AFUNCDATA)
6623 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
6624 p.To.Type = obj.TYPE_MEM
6625 p.To.Name = obj.NAME_EXTERN
6626 p.To.Sym = openDeferInfo
6627 }
6628
6629 emitWrappedFuncInfo(e, pp)
6630
6631
6632 s.bstart = make([]*obj.Prog, f.NumBlocks())
6633 s.pp = pp
6634 var progToValue map[*obj.Prog]*ssa.Value
6635 var progToBlock map[*obj.Prog]*ssa.Block
6636 var valueToProgAfter []*obj.Prog
6637 if gatherPrintInfo {
6638 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
6639 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
6640 f.Logf("genssa %s\n", f.Name)
6641 progToBlock[s.pp.Next] = f.Blocks[0]
6642 }
6643
6644 if base.Ctxt.Flag_locationlists {
6645 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
6646 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
6647 }
6648 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
6649 clear(valueToProgAfter)
6650 }
6651
6652
6653
6654 firstPos := src.NoXPos
6655 for _, v := range f.Entry.Values {
6656 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6657 firstPos = v.Pos
6658 v.Pos = firstPos.WithDefaultStmt()
6659 break
6660 }
6661 }
6662
6663
6664
6665
6666 var inlMarks map[*obj.Prog]int32
6667 var inlMarkList []*obj.Prog
6668
6669
6670
6671 var inlMarksByPos map[src.XPos][]*obj.Prog
6672
6673 var argLiveIdx int = -1
6674
6675
6676
6677
6678
6679 var hotAlign, hotRequire int64
6680
6681 if base.Debug.AlignHot > 0 {
6682 switch base.Ctxt.Arch.Name {
6683
6684
6685
6686
6687
6688 case "amd64", "386":
6689
6690
6691
6692 hotAlign = 64
6693 hotRequire = 31
6694 }
6695 }
6696
6697
6698 for i, b := range f.Blocks {
6699
6700 s.lineRunStart = nil
6701 s.SetPos(s.pp.Pos.WithNotStmt())
6702
6703 if hotAlign > 0 && b.Hotness&ssa.HotPgoInitial == ssa.HotPgoInitial {
6704
6705
6706
6707
6708
6709 p := s.pp.Prog(obj.APCALIGNMAX)
6710 p.From.SetConst(hotAlign)
6711 p.To.SetConst(hotRequire)
6712 }
6713
6714 s.bstart[b.ID] = s.pp.Next
6715
6716 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
6717 argLiveIdx = idx
6718 p := s.pp.Prog(obj.APCDATA)
6719 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
6720 p.To.SetConst(int64(idx))
6721 }
6722
6723
6724 Arch.SSAMarkMoves(&s, b)
6725 for _, v := range b.Values {
6726 x := s.pp.Next
6727 s.DebugFriendlySetPosFrom(v)
6728
6729 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
6730 v.Fatalf("input[0] and output not in same register %s", v.LongString())
6731 }
6732
6733 switch v.Op {
6734 case ssa.OpInitMem:
6735
6736 case ssa.OpArg:
6737
6738 case ssa.OpSP, ssa.OpSB:
6739
6740 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
6741
6742 case ssa.OpGetG:
6743
6744
6745 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
6746
6747 case ssa.OpPhi:
6748 CheckLoweredPhi(v)
6749 case ssa.OpConvert:
6750
6751 if v.Args[0].Reg() != v.Reg() {
6752 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
6753 }
6754 case ssa.OpInlMark:
6755 p := Arch.Ginsnop(s.pp)
6756 if inlMarks == nil {
6757 inlMarks = map[*obj.Prog]int32{}
6758 inlMarksByPos = map[src.XPos][]*obj.Prog{}
6759 }
6760 inlMarks[p] = v.AuxInt32()
6761 inlMarkList = append(inlMarkList, p)
6762 pos := v.Pos.AtColumn1()
6763 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
6764 firstPos = src.NoXPos
6765
6766 default:
6767
6768 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
6769 s.SetPos(firstPos)
6770 firstPos = src.NoXPos
6771 }
6772
6773
6774 s.pp.NextLive = s.livenessMap.Get(v)
6775 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
6776
6777
6778 Arch.SSAGenValue(&s, v)
6779 }
6780
6781 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
6782 argLiveIdx = idx
6783 p := s.pp.Prog(obj.APCDATA)
6784 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
6785 p.To.SetConst(int64(idx))
6786 }
6787
6788 if base.Ctxt.Flag_locationlists {
6789 valueToProgAfter[v.ID] = s.pp.Next
6790 }
6791
6792 if gatherPrintInfo {
6793 for ; x != s.pp.Next; x = x.Link {
6794 progToValue[x] = v
6795 }
6796 }
6797 }
6798
6799 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
6800 p := Arch.Ginsnop(s.pp)
6801 p.Pos = p.Pos.WithIsStmt()
6802 if b.Pos == src.NoXPos {
6803 b.Pos = p.Pos
6804 if b.Pos == src.NoXPos {
6805 b.Pos = s.pp.Text.Pos
6806 }
6807 }
6808 b.Pos = b.Pos.WithBogusLine()
6809 }
6810
6811
6812
6813
6814
6815 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
6816
6817
6818 var next *ssa.Block
6819 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
6820
6821
6822
6823
6824 next = f.Blocks[i+1]
6825 }
6826 x := s.pp.Next
6827 s.SetPos(b.Pos)
6828 Arch.SSAGenBlock(&s, b, next)
6829 if gatherPrintInfo {
6830 for ; x != s.pp.Next; x = x.Link {
6831 progToBlock[x] = b
6832 }
6833 }
6834 }
6835 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
6836
6837
6838
6839
6840 Arch.Ginsnop(s.pp)
6841 }
6842 if openDeferInfo != nil {
6843
6844
6845
6846
6847
6848
6849
6850
6851 s.pp.NextLive = s.livenessMap.DeferReturn
6852 p := s.pp.Prog(obj.ACALL)
6853 p.To.Type = obj.TYPE_MEM
6854 p.To.Name = obj.NAME_EXTERN
6855 p.To.Sym = ir.Syms.Deferreturn
6856
6857
6858
6859
6860
6861 for _, o := range f.OwnAux.ABIInfo().OutParams() {
6862 n := o.Name
6863 rts, offs := o.RegisterTypesAndOffsets()
6864 for i := range o.Registers {
6865 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
6866 }
6867 }
6868
6869 s.pp.Prog(obj.ARET)
6870 }
6871
6872 if inlMarks != nil {
6873 hasCall := false
6874
6875
6876
6877
6878 for p := s.pp.Text; p != nil; p = p.Link {
6879 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT ||
6880 p.As == obj.APCALIGN || p.As == obj.APCALIGNMAX || Arch.LinkArch.Family == sys.Wasm {
6881
6882
6883
6884
6885
6886 continue
6887 }
6888 if _, ok := inlMarks[p]; ok {
6889
6890
6891 continue
6892 }
6893 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
6894 hasCall = true
6895 }
6896 pos := p.Pos.AtColumn1()
6897 marks := inlMarksByPos[pos]
6898 if len(marks) == 0 {
6899 continue
6900 }
6901 for _, m := range marks {
6902
6903
6904
6905 p.Pos = p.Pos.WithIsStmt()
6906 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
6907
6908 m.As = obj.ANOP
6909 m.Pos = src.NoXPos
6910 m.From = obj.Addr{}
6911 m.To = obj.Addr{}
6912 }
6913 delete(inlMarksByPos, pos)
6914 }
6915
6916 for _, p := range inlMarkList {
6917 if p.As != obj.ANOP {
6918 s.pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
6919 }
6920 }
6921
6922 if e.stksize == 0 && !hasCall {
6923
6924
6925
6926
6927
6928
6929 for p := s.pp.Text; p != nil; p = p.Link {
6930 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
6931 continue
6932 }
6933 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
6934
6935 nop := Arch.Ginsnop(s.pp)
6936 nop.Pos = e.curfn.Pos().WithIsStmt()
6937
6938
6939
6940
6941
6942 for x := s.pp.Text; x != nil; x = x.Link {
6943 if x.Link == nop {
6944 x.Link = nop.Link
6945 break
6946 }
6947 }
6948
6949 for x := s.pp.Text; x != nil; x = x.Link {
6950 if x.Link == p {
6951 nop.Link = p
6952 x.Link = nop
6953 break
6954 }
6955 }
6956 }
6957 break
6958 }
6959 }
6960 }
6961
6962 if base.Ctxt.Flag_locationlists {
6963 var debugInfo *ssa.FuncDebug
6964 debugInfo = e.curfn.DebugInfo.(*ssa.FuncDebug)
6965
6966
6967 debugInfo.EntryID = f.Entry.ID
6968 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
6969 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
6970 } else {
6971 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
6972 }
6973 bstart := s.bstart
6974 idToIdx := make([]int, f.NumBlocks())
6975 for i, b := range f.Blocks {
6976 idToIdx[b.ID] = i
6977 }
6978
6979
6980
6981 debugInfo.GetPC = func(b, v ssa.ID) int64 {
6982 switch v {
6983 case ssa.BlockStart.ID:
6984 if b == f.Entry.ID {
6985 return 0
6986
6987 }
6988 return bstart[b].Pc
6989 case ssa.BlockEnd.ID:
6990 blk := f.Blocks[idToIdx[b]]
6991 nv := len(blk.Values)
6992 return valueToProgAfter[blk.Values[nv-1].ID].Pc
6993 case ssa.FuncEnd.ID:
6994 return e.curfn.LSym.Size
6995 default:
6996 return valueToProgAfter[v].Pc
6997 }
6998 }
6999 }
7000
7001
7002 for _, br := range s.Branches {
7003 br.P.To.SetTarget(s.bstart[br.B.ID])
7004 if br.P.Pos.IsStmt() != src.PosIsStmt {
7005 br.P.Pos = br.P.Pos.WithNotStmt()
7006 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7007 br.P.Pos = br.P.Pos.WithNotStmt()
7008 }
7009
7010 }
7011
7012
7013 for _, jt := range s.JumpTables {
7014
7015 targets := make([]*obj.Prog, len(jt.Succs))
7016 for i, e := range jt.Succs {
7017 targets[i] = s.bstart[e.Block().ID]
7018 }
7019
7020
7021
7022 fi := s.pp.CurFunc.LSym.Func()
7023 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7024 }
7025
7026 if e.log {
7027 filename := ""
7028 for p := s.pp.Text; p != nil; p = p.Link {
7029 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7030 filename = p.InnermostFilename()
7031 f.Logf("# %s\n", filename)
7032 }
7033
7034 var s string
7035 if v, ok := progToValue[p]; ok {
7036 s = v.String()
7037 } else if b, ok := progToBlock[p]; ok {
7038 s = b.String()
7039 } else {
7040 s = " "
7041 }
7042 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7043 }
7044 }
7045 if f.HTMLWriter != nil {
7046 var buf strings.Builder
7047 buf.WriteString("<code>")
7048 buf.WriteString("<dl class=\"ssa-gen\">")
7049 filename := ""
7050
7051 liveness := lv.Format(nil)
7052 if liveness != "" {
7053 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7054 buf.WriteString(html.EscapeString("# " + liveness))
7055 buf.WriteString("</dd>")
7056 }
7057
7058 for p := s.pp.Text; p != nil; p = p.Link {
7059
7060
7061 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7062 filename = p.InnermostFilename()
7063 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7064 buf.WriteString(html.EscapeString("# " + filename))
7065 buf.WriteString("</dd>")
7066 }
7067
7068 buf.WriteString("<dt class=\"ssa-prog-src\">")
7069 if v, ok := progToValue[p]; ok {
7070
7071
7072 if p.As != obj.APCDATA {
7073 if liveness := lv.Format(v); liveness != "" {
7074
7075 buf.WriteString("</dt><dd class=\"ssa-prog\">")
7076 buf.WriteString(html.EscapeString("# " + liveness))
7077 buf.WriteString("</dd>")
7078
7079 buf.WriteString("<dt class=\"ssa-prog-src\">")
7080 }
7081 }
7082
7083 buf.WriteString(v.HTML())
7084 } else if b, ok := progToBlock[p]; ok {
7085 buf.WriteString("<b>" + b.HTML() + "</b>")
7086 }
7087 buf.WriteString("</dt>")
7088 buf.WriteString("<dd class=\"ssa-prog\">")
7089 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7090 buf.WriteString("</dd>")
7091 }
7092 buf.WriteString("</dl>")
7093 buf.WriteString("</code>")
7094 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7095 }
7096 if ssa.GenssaDump[f.Name] {
7097 fi := f.DumpFileForPhase("genssa")
7098 if fi != nil {
7099
7100
7101 inliningDiffers := func(a, b []src.Pos) bool {
7102 if len(a) != len(b) {
7103 return true
7104 }
7105 for i := range a {
7106 if a[i].Filename() != b[i].Filename() {
7107 return true
7108 }
7109 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7110 return true
7111 }
7112 }
7113 return false
7114 }
7115
7116 var allPosOld []src.Pos
7117 var allPos []src.Pos
7118
7119 for p := s.pp.Text; p != nil; p = p.Link {
7120 if p.Pos.IsKnown() {
7121 allPos = allPos[:0]
7122 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7123 if inliningDiffers(allPos, allPosOld) {
7124 for _, pos := range allPos {
7125 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7126 }
7127 allPos, allPosOld = allPosOld, allPos
7128 }
7129 }
7130
7131 var s string
7132 if v, ok := progToValue[p]; ok {
7133 s = v.String()
7134 } else if b, ok := progToBlock[p]; ok {
7135 s = b.String()
7136 } else {
7137 s = " "
7138 }
7139 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7140 }
7141 fi.Close()
7142 }
7143 }
7144
7145 defframe(&s, e, f)
7146
7147 f.HTMLWriter.Close()
7148 f.HTMLWriter = nil
7149 }
7150
// defframe finalizes the function prologue: it fills in the frame and
// argument sizes on the TEXT instruction, spills partially-live register
// arguments to their stack slots, and emits code to zero ambiguously-live
// (needzero) stack variables.
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	// Fill in argument and frame size.
	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	p := pp.Text

	// Insert code to spill argument registers if the named slot may be
	// partially live: liveness treats the whole named slot as live, but not
	// all of its parts may have been stored to it. This only applies to
	// SSA-able, non-address-taken register arguments (s.partLiveArgs), and
	// only when optimizing (with -N everything is spilled up front).
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, record which (name, offset) parts are already spilled in
		// the entry block before the first call (safepoint).
		type nameOff struct {
			n *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				// Stop at the first call: spills after it don't help.
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then, insert spills for pointer-carrying register pieces that were
		// not already spilled, so the GC sees initialized pointer words.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Insert code to zero ambiguously live variables so that the
	// garbage collector only sees initialized values when it looks
	// for pointers. lo/hi delimit the current run of frame offsets
	// to be zeroed (hi == lo == 0 means no pending range).
	var lo, hi int64

	// Opaque state for the backend's ZeroRange to carry across calls
	// (e.g. which helper registers have already been zeroed).
	var state uint32

	// Iterate through declarations, merging adjacent needzero autos into
	// ranges. Autos appear in decreasing frame-offset order, so each new
	// variable either extends the pending range downward or starts a new one.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with the range we already have (small gaps are
			// cheaper to zero through than to restart).
			lo = n.FrameOffset()
			continue
		}

		// Zero old range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Set new range.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero final range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
7257
7258
// IndexJump describes one conditional branch: the assembler jump opcode
// to emit and the index of the successor block it targets.
type IndexJump struct {
	Jump obj.As
	Index int
}
7263
7264 func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
7265 p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
7266 p.Pos = b.Pos
7267 }
7268
7269
7270
7271 func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
7272 switch next {
7273 case b.Succs[0].Block():
7274 s.oneJump(b, &jumps[0][0])
7275 s.oneJump(b, &jumps[0][1])
7276 case b.Succs[1].Block():
7277 s.oneJump(b, &jumps[1][0])
7278 s.oneJump(b, &jumps[1][1])
7279 default:
7280 var q *obj.Prog
7281 if b.Likely != ssa.BranchUnlikely {
7282 s.oneJump(b, &jumps[1][0])
7283 s.oneJump(b, &jumps[1][1])
7284 q = s.Br(obj.AJMP, b.Succs[1].Block())
7285 } else {
7286 s.oneJump(b, &jumps[0][0])
7287 s.oneJump(b, &jumps[0][1])
7288 q = s.Br(obj.AJMP, b.Succs[0].Block())
7289 }
7290 q.Pos = b.Pos
7291 }
7292 }
7293
7294
// AddAux adds v's offset (AuxInt) and aux symbol information to the address a.
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
// AddAux2 adds offset plus any symbol information from v.Aux to the
// address a, which must be a memory or address operand.
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
	if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
		v.Fatalf("bad AddAux addr %v", a)
	}
	// add integer offset
	a.Offset += offset

	// If no additional symbol offset, we're done.
	if v.Aux == nil {
		return
	}
	// Add symbol's offset from its base register.
	switch n := v.Aux.(type) {
	case *ssa.AuxCall:
		a.Name = obj.NAME_EXTERN
		a.Sym = n.Fn
	case *obj.LSym:
		a.Name = obj.NAME_EXTERN
		a.Sym = n
	case *ir.Name:
		// Parameters that live in memory use NAME_PARAM; everything else
		// (locals, register-resident results) is frame-local NAME_AUTO.
		if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
			a.Name = obj.NAME_PARAM
		} else {
			a.Name = obj.NAME_AUTO
		}
		a.Sym = n.Linksym()
		a.Offset += n.FrameOffset()
	default:
		v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
	}
}
7329
7330
7331
// extendIndex extends idx to a full pointer width. kind is the bounds
// check to report if one must be emitted here (which only happens for
// 64-bit indexes on 32-bit targets); bounded reports that the index is
// already known to be in range, suppressing the check.
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		return idx
	}
	if size > s.config.PtrSize {
		// Truncate a 64-bit index on a 32-bit target: test the high word
		// and branch to an out-of-bounds panic if it is not zero.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			// Proven in range, or bounds checks disabled (-B).
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			// For unsigned indexes, switch to the unsigned variant of the
			// bounds kind so the runtime reports the failure correctly.
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Extend value to the required size: select the op by the pair
	// (source size, pointer size) encoded as 10*size + PtrSize.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
7423
7424
7425
7426 func CheckLoweredPhi(v *ssa.Value) {
7427 if v.Op != ssa.OpPhi {
7428 v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
7429 }
7430 if v.Type.IsMemory() {
7431 return
7432 }
7433 f := v.Block.Func
7434 loc := f.RegAlloc[v.ID]
7435 for _, a := range v.Args {
7436 if aloc := f.RegAlloc[a.ID]; aloc != loc {
7437 v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
7438 }
7439 }
7440 }
7441
7442
7443
7444
7445
7446 func CheckLoweredGetClosurePtr(v *ssa.Value) {
7447 entry := v.Block.Func.Entry
7448 if entry != v.Block {
7449 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7450 }
7451 for _, w := range entry.Values {
7452 if w == v {
7453 break
7454 }
7455 switch w.Op {
7456 case ssa.OpArgIntReg, ssa.OpArgFloatReg:
7457
7458 default:
7459 base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
7460 }
7461 }
7462 }
7463
7464
7465 func CheckArgReg(v *ssa.Value) {
7466 entry := v.Block.Func.Entry
7467 if entry != v.Block {
7468 base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
7469 }
7470 }
7471
7472 func AddrAuto(a *obj.Addr, v *ssa.Value) {
7473 n, off := ssa.AutoVar(v)
7474 a.Type = obj.TYPE_MEM
7475 a.Sym = n.Linksym()
7476 a.Reg = int16(Arch.REGSP)
7477 a.Offset = n.FrameOffset() + off
7478 if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
7479 a.Name = obj.NAME_PARAM
7480 } else {
7481 a.Name = obj.NAME_AUTO
7482 }
7483 }
7484
7485
7486
// Call emits a new CALL instruction for the SSA value v,
// using PrepareCall to record liveness and argument-space use first.
func (s *State) Call(v *ssa.Value) *obj.Prog {
	pPosIsStmt := s.pp.Pos.IsStmt() // captured before PrepareCall, which may advance s.pp.Pos
	s.PrepareCall(v)

	p := s.Prog(obj.ACALL)
	// The statement-ness of the call comes from the current position,
	// not from v's own position.
	if pPosIsStmt == src.PosIsStmt {
		p.Pos = v.Pos.WithIsStmt()
	} else {
		p.Pos = v.Pos.WithNotStmt()
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		// Direct call to a known symbol.
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = sym.Fn
	} else {
		// Indirect call through a register; the operand type differs
		// per architecture family.
		switch Arch.LinkArch.Family {
		case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
			p.To.Type = obj.TYPE_REG
		case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
			p.To.Type = obj.TYPE_MEM
		default:
			base.Fatalf("unknown indirect call family")
		}
		p.To.Reg = v.Args[0].Reg()
	}
	return p
}
7515
7516
7517
// TailCall emits a tail-call instruction for the SSA value v.
// It is Call with the opcode rewritten to RET.
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}
7523
7524
7525
7526
7527 func (s *State) PrepareCall(v *ssa.Value) {
7528 idx := s.livenessMap.Get(v)
7529 if !idx.StackMapValid() {
7530
7531 if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
7532 base.Fatalf("missing stack map index for %v", v.LongString())
7533 }
7534 }
7535
7536 call, ok := v.Aux.(*ssa.AuxCall)
7537
7538 if ok {
7539
7540
7541 if nowritebarrierrecCheck != nil {
7542 nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
7543 }
7544 }
7545
7546 if s.maxarg < v.AuxInt {
7547 s.maxarg = v.AuxInt
7548 }
7549 }
7550
7551
7552
7553 func (s *State) UseArgs(n int64) {
7554 if s.maxarg < n {
7555 s.maxarg = n
7556 }
7557 }
7558
7559
7560 func fieldIdx(n *ir.SelectorExpr) int {
7561 t := n.X.Type()
7562 if !t.IsStruct() {
7563 panic("ODOT's LHS is not a struct")
7564 }
7565
7566 for i, f := range t.Fields() {
7567 if f.Sym == n.Sel {
7568 if f.Offset != n.Offset() {
7569 panic("field offset doesn't match")
7570 }
7571 return i
7572 }
7573 }
7574 panic(fmt.Sprintf("can't find field in expr %v\n", n))
7575
7576
7577
7578 }
7579
7580
7581
// ssafn holds frontend information about a function that the backend
// is processing. It also implements the ssa.Frontend services used by
// the SSA backend.
type ssafn struct {
	curfn *ir.Func
	strings map[string]*obj.LSym // map from constant string to data symbol (lazily built, see StringData)
	stksize int64 // stack size for current frame
	stkptrsize int64 // prefix of stack containing pointers

	// alignment for current frame; outgoing-argument space (maxarg)
	// is rounded up to this in defframe.
	stkalign int64

	log bool // print ssa debug to stdout
}
7596
7597
7598
7599 func (e *ssafn) StringData(s string) *obj.LSym {
7600 if aux, ok := e.strings[s]; ok {
7601 return aux
7602 }
7603 if e.strings == nil {
7604 e.strings = make(map[string]*obj.LSym)
7605 }
7606 data := staticdata.StringSym(e.curfn.Pos(), s)
7607 e.strings[s] = data
7608 return data
7609 }
7610
7611
// SplitSlot returns a slot representing the data of parent starting at offset.
// For splittable autos it creates a fresh local (named parent+suffix);
// otherwise it returns a sub-slot that keeps referring to the parent.
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
	node := parent.N

	if node.Class != ir.PAUTO || node.Addrtaken() {
		// addressed things and non-autos retain their parents (i.e., cannot truly be split)
		return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
	}

	sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
	n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
	n.SetUsed(true)
	n.SetEsc(ir.EscNever)
	types.CalcSize(t)
	return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
}
7627
7628
7629 func (e *ssafn) Logf(msg string, args ...interface{}) {
7630 if e.log {
7631 fmt.Printf(msg, args...)
7632 }
7633 }
7634
// Log reports whether debug logging is enabled for this function.
func (e *ssafn) Log() bool {
	return e.log
}
7638
7639
// Fatalf reports a compiler error at pos and exits.
func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
	base.Pos = pos
	// Prefix the message with the current function's name for context.
	nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
	base.Fatalf("'%s': "+msg, nargs...)
}
7645
7646
7647
// Warnl reports a compiler warning at pos.
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}
7651
// Debug_checknil reports whether nil-check debugging is enabled.
func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}
7655
// UseWriteBarrier reports whether write barriers are enabled (-wb flag).
func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}
7659
// Syslook returns the linker symbol for the named runtime helper,
// failing the compile for unknown names.
func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil
}
7678
// Func returns the function being compiled.
func (e *ssafn) Func() *ir.Func {
	return e.curfn
}
7682
7683 func clobberBase(n ir.Node) ir.Node {
7684 if n.Op() == ir.ODOT {
7685 n := n.(*ir.SelectorExpr)
7686 if n.X.Type().NumFields() == 1 {
7687 return clobberBase(n.X)
7688 }
7689 }
7690 if n.Op() == ir.OINDEX {
7691 n := n.(*ir.IndexExpr)
7692 if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
7693 return clobberBase(n.X)
7694 }
7695 }
7696 return n
7697 }
7698
7699
// callTargetLSym returns the correct entry symbol for a call to callee,
// taking the callee's declared ABI into account.
func callTargetLSym(callee *ir.Name) *obj.LSym {
	if callee.Func == nil {
		// No Func to read the ABI from (presumably an imported function
		// without a local body — TODO confirm); fall back to the default
		// linker symbol.
		return callee.Linksym()
	}

	return callee.LinksymABI(callee.Func.ABI)
}
7710
7711
// deferStructFnField is the index of the "fn" field in the struct built
// by deferstruct; deferstruct verifies it at build time.
const deferStructFnField = 4

// deferType caches the type constructed by deferstruct.
var deferType *types.Type
7715
7716
7717
// deferstruct builds (once) and returns a named struct type modeling the
// runtime's _defer record; its field list must stay in sync with the
// runtime's definition (TODO confirm against runtime/runtime2.go).
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// The remaining pointer-ish fields are declared as uintptr;
		// presumably they are scanned specially rather than by type
		// (NOTE(review): confirm against the runtime's _defer handling).
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	// Guard the constant used to locate the "fn" field.
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Build a struct holding the above fields and compute its layout.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
7756
7757
7758
7759
7760
7761 func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
7762 return obj.Addr{
7763 Name: obj.NAME_NONE,
7764 Type: obj.TYPE_MEM,
7765 Reg: baseReg,
7766 Offset: spill.Offset + extraOffset,
7767 }
7768 }
7769
var (
	// BoundsCheckFunc holds the runtime symbols called on bounds-check
	// failure, indexed by ssa.BoundsKind.
	BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
	// ExtendCheckFunc holds the runtime symbols for failed 64-bit index
	// checks on 32-bit targets (see extendIndex), indexed by ssa.BoundsKind.
	ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
)
7774
// View as plain text