// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package arm64

import (
	"math"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/arm64"
	"internal/abi"
)

// loadByType returns the load instruction of the given type.
func loadByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return arm64.AMOVB
			} else {
				return arm64.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return arm64.AMOVH
			} else {
				return arm64.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return arm64.AMOVW
			} else {
				return arm64.AMOVWU
			}
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return arm64.AMOVB
		case 2:
			return arm64.AMOVH
		case 4:
			return arm64.AMOVW
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad store type")
}

// loadByType2 returns an opcode that can load consecutive memory locations into 2 registers with type t.
// It returns obj.AXXX if no such opcode exists.
func loadByType2(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFLDPS
		case 8:
			return arm64.AFLDPD
		}
	} else {
		switch t.Size() {
		case 4:
			return arm64.ALDPW
		case 8:
			return arm64.ALDP
		}
	}
	return obj.AXXX
}

// storeByType2 returns an opcode that can store registers with type t into 2 consecutive memory locations.
// It returns obj.AXXX if no such opcode exists.
func storeByType2(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFSTPS
		case 8:
			return arm64.AFSTPD
		}
	} else {
		switch t.Size() {
		case 4:
			return arm64.ASTPW
		case 8:
			return arm64.ASTP
		}
	}
	return obj.AXXX
}

// makeshift encodes a register shifted by a constant, used as an Offset in a Prog:
// bits 16-20 hold the register, bits 10-15 the shift amount, and typ selects the shift kind.
func makeshift(v *ssa.Value, reg int16, typ int64, s int64) int64 {
	if s < 0 || s >= 64 {
		v.Fatalf("shift out of range: %d", s)
	}
	return int64(reg&31)<<16 | typ | (s&63)<<10
}

// genshift generates a Prog for r = r0 op (r1 shifted by n).
func genshift(s *ssagen.State, v *ssa.Value, as obj.As, r0, r1, r int16, typ int64, n int64) *obj.Prog {
	p := s.Prog(as)
	p.From.Type = obj.TYPE_SHIFT
	p.From.Offset = makeshift(v, r1, typ, n)
	p.Reg = r0
	if r != 0 {
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	}
	return p
}

// genIndexedOperand generates the memory operand for the indexed load/store instructions.
// base and idx are registers.
func genIndexedOperand(op ssa.Op, base, idx int16) obj.Addr {
	// Reg: base register, Index: index register (possibly with an LSL shift encoding).
	mop := obj.Addr{Type: obj.TYPE_MEM, Reg: base}
	switch op {
	case ssa.OpARM64MOVDloadidx8, ssa.OpARM64MOVDstoreidx8,
		ssa.OpARM64FMOVDloadidx8, ssa.OpARM64FMOVDstoreidx8:
		mop.Index = arm64.REG_LSL | 3<<5 | idx&31
	case ssa.OpARM64MOVWloadidx4, ssa.OpARM64MOVWUloadidx4, ssa.OpARM64MOVWstoreidx4,
		ssa.OpARM64FMOVSloadidx4, ssa.OpARM64FMOVSstoreidx4:
		mop.Index = arm64.REG_LSL | 2<<5 | idx&31
	case ssa.OpARM64MOVHloadidx2, ssa.OpARM64MOVHUloadidx2, ssa.OpARM64MOVHstoreidx2:
		mop.Index = arm64.REG_LSL | 1<<5 | idx&31
	default: // not shifted
		mop.Index = idx
	}
	return mop
}

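// ssaGenValue generates the machine instructions (Progs) for a single SSA value v.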
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpARM64MOVDreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := arm64.AMOVD
		if v.Type.IsFloat() {
			switch v.Type.Size() {
			case 4:
				as = arm64.AFMOVS
			case 8:
				as = arm64.AFMOVD
			default:
				panic("bad float size")
			}
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
	case ssa.OpARM64MOVDnop, ssa.OpARM64ZERO:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(loadByType(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(storeByType(v.Type))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
		ssagen.CheckArgReg(v)
		// The assembler needs to wrap the entry safepoint/stack growth code with
		// spill/unspill of the register arguments; set that up here, once per function.
		args := v.Block.Func.RegArgs
		if len(args) == 0 {
			break
		}
		v.Block.Func.RegArgs = nil

		for i := 0; i < len(args); i++ {
			a := args[i]
			// Offset by the size of the saved LR slot.
			addr := ssagen.SpillSlotAddr(a, arm64.REGSP, base.Ctxt.Arch.FixedFrameSize)
			// Look for double-register load/store opportunities with the next arg.
			if i < len(args)-1 {
				b := args[i+1]
				if a.Type.Size() == b.Type.Size() &&
					a.Type.IsFloat() == b.Type.IsFloat() &&
					b.Offset == a.Offset+a.Type.Size() {
					ld := loadByType2(a.Type)
					st := storeByType2(a.Type)
					if ld != obj.AXXX && st != obj.AXXX {
						s.FuncInfo().AddSpill(obj.RegSpill{Reg: a.Reg, Reg2: b.Reg, Addr: addr, Unspill: ld, Spill: st})
						i++ // b is handled together with a
						continue
					}
				}
			}

			s.FuncInfo().AddSpill(obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
		}

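	// Standard 2-source-operand ops: v.Reg() = Args[0] op Args[1].
	// In the Prog, From holds the second source and Reg the first.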
	case ssa.OpARM64ADD,
		ssa.OpARM64SUB,
		ssa.OpARM64AND,
		ssa.OpARM64OR,
		ssa.OpARM64XOR,
		ssa.OpARM64BIC,
		ssa.OpARM64EON,
		ssa.OpARM64ORN,
		ssa.OpARM64MUL,
		ssa.OpARM64MULW,
		ssa.OpARM64MNEG,
		ssa.OpARM64MNEGW,
		ssa.OpARM64MULH,
		ssa.OpARM64UMULH,
		ssa.OpARM64MULL,
		ssa.OpARM64UMULL,
		ssa.OpARM64DIV,
		ssa.OpARM64UDIV,
		ssa.OpARM64DIVW,
		ssa.OpARM64UDIVW,
		ssa.OpARM64MOD,
		ssa.OpARM64UMOD,
		ssa.OpARM64MODW,
		ssa.OpARM64UMODW,
		ssa.OpARM64SLL,
		ssa.OpARM64SRL,
		ssa.OpARM64SRA,
		ssa.OpARM64FADDS,
		ssa.OpARM64FADDD,
		ssa.OpARM64FSUBS,
		ssa.OpARM64FSUBD,
		ssa.OpARM64FMULS,
		ssa.OpARM64FMULD,
		ssa.OpARM64FNMULS,
		ssa.OpARM64FNMULD,
		ssa.OpARM64FDIVS,
		ssa.OpARM64FDIVD,
		ssa.OpARM64FMINS,
		ssa.OpARM64FMIND,
		ssa.OpARM64FMAXS,
		ssa.OpARM64FMAXD,
		ssa.OpARM64ROR,
		ssa.OpARM64RORW:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
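	// 3-source fused multiply ops, e.g. MADD: Rt = Ra + Rm*Rn.
	// Args[0] is the addend, Args[1] and Args[2] are the multiplicands.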
	case ssa.OpARM64FMADDS,
		ssa.OpARM64FMADDD,
		ssa.OpARM64FNMADDS,
		ssa.OpARM64FNMADDD,
		ssa.OpARM64FMSUBS,
		ssa.OpARM64FMSUBD,
		ssa.OpARM64FNMSUBS,
		ssa.OpARM64FNMSUBD,
		ssa.OpARM64MADD,
		ssa.OpARM64MADDW,
		ssa.OpARM64MSUB,
		ssa.OpARM64MSUBW:
		rt := v.Reg()
		ra := v.Args[0].Reg()
		rm := v.Args[1].Reg()
		rn := v.Args[2].Reg()
		p := s.Prog(v.Op.Asm())
		p.Reg = ra
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rm
		p.AddRestSourceReg(rn)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rt
	case ssa.OpARM64ADDconst,
		ssa.OpARM64SUBconst,
		ssa.OpARM64ANDconst,
		ssa.OpARM64ORconst,
		ssa.OpARM64XORconst,
		ssa.OpARM64SLLconst,
		ssa.OpARM64SRLconst,
		ssa.OpARM64SRAconst,
		ssa.OpARM64RORconst,
		ssa.OpARM64RORWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADDSconstflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64ADCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADCSflags,
		ssa.OpARM64ADDSflags,
		ssa.OpARM64SBCSflags,
		ssa.OpARM64SUBSflags:
		r := v.Reg0()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64NEGSflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64NGCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64EXTRconst,
		ssa.OpARM64EXTRWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.AddRestSourceReg(v.Args[0].Reg())
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64MVNshiftLL, ssa.OpARM64NEGshiftLL:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64MVNshiftRL, ssa.OpARM64NEGshiftRL:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64MVNshiftRA, ssa.OpARM64NEGshiftRA:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64MVNshiftRO:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64ADDshiftLL,
		ssa.OpARM64SUBshiftLL,
		ssa.OpARM64ANDshiftLL,
		ssa.OpARM64ORshiftLL,
		ssa.OpARM64XORshiftLL,
		ssa.OpARM64EONshiftLL,
		ssa.OpARM64ORNshiftLL,
		ssa.OpARM64BICshiftLL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64ADDshiftRL,
		ssa.OpARM64SUBshiftRL,
		ssa.OpARM64ANDshiftRL,
		ssa.OpARM64ORshiftRL,
		ssa.OpARM64XORshiftRL,
		ssa.OpARM64EONshiftRL,
		ssa.OpARM64ORNshiftRL,
		ssa.OpARM64BICshiftRL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64ADDshiftRA,
		ssa.OpARM64SUBshiftRA,
		ssa.OpARM64ANDshiftRA,
		ssa.OpARM64ORshiftRA,
		ssa.OpARM64XORshiftRA,
		ssa.OpARM64EONshiftRA,
		ssa.OpARM64ORNshiftRA,
		ssa.OpARM64BICshiftRA:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64ANDshiftRO,
		ssa.OpARM64ORshiftRO,
		ssa.OpARM64XORshiftRO,
		ssa.OpARM64EONshiftRO,
		ssa.OpARM64ORNshiftRO,
		ssa.OpARM64BICshiftRO:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FMOVSconst,
		ssa.OpARM64FMOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FCMPS0,
		ssa.OpARM64FCMPD0:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(0)
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMP,
		ssa.OpARM64CMPW,
		ssa.OpARM64CMN,
		ssa.OpARM64CMNW,
		ssa.OpARM64TST,
		ssa.OpARM64TSTW,
		ssa.OpARM64FCMPS,
		ssa.OpARM64FCMPD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPconst,
		ssa.OpARM64CMPWconst,
		ssa.OpARM64CMNconst,
		ssa.OpARM64CMNWconst,
		ssa.OpARM64TSTconst,
		ssa.OpARM64TSTWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPshiftLL, ssa.OpARM64CMNshiftLL, ssa.OpARM64TSTshiftLL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64CMPshiftRL, ssa.OpARM64CMNshiftRL, ssa.OpARM64TSTshiftRL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64CMPshiftRA, ssa.OpARM64CMNshiftRA, ssa.OpARM64TSTshiftRA:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64TSTshiftRO:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64MOVDaddr:
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

		var wantreg string
		// MOVD $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP,
		//               using the tmp register when the constant is large
		// - base is SB: load external address from the constant pool (use relocation)
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVD $off(SP), R.
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
	case ssa.OpARM64MOVBload,
		ssa.OpARM64MOVBUload,
		ssa.OpARM64MOVHload,
		ssa.OpARM64MOVHUload,
		ssa.OpARM64MOVWload,
		ssa.OpARM64MOVWUload,
		ssa.OpARM64MOVDload,
		ssa.OpARM64FMOVSload,
		ssa.OpARM64FMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LDP, ssa.OpARM64LDPW, ssa.OpARM64LDPSW, ssa.OpARM64FLDPD, ssa.OpARM64FLDPS:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REGREG
		p.To.Reg = v.Reg0()
		p.To.Offset = int64(v.Reg1())
	case ssa.OpARM64MOVBloadidx,
		ssa.OpARM64MOVBUloadidx,
		ssa.OpARM64MOVHloadidx,
		ssa.OpARM64MOVHUloadidx,
		ssa.OpARM64MOVWloadidx,
		ssa.OpARM64MOVWUloadidx,
		ssa.OpARM64MOVDloadidx,
		ssa.OpARM64FMOVSloadidx,
		ssa.OpARM64FMOVDloadidx,
		ssa.OpARM64MOVHloadidx2,
		ssa.OpARM64MOVHUloadidx2,
		ssa.OpARM64MOVWloadidx4,
		ssa.OpARM64MOVWUloadidx4,
		ssa.OpARM64MOVDloadidx8,
		ssa.OpARM64FMOVDloadidx8,
		ssa.OpARM64FMOVSloadidx4:
		p := s.Prog(v.Op.Asm())
		p.From = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LDAR,
		ssa.OpARM64LDARB,
		ssa.OpARM64LDARW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64MOVBstore,
		ssa.OpARM64MOVHstore,
		ssa.OpARM64MOVWstore,
		ssa.OpARM64MOVDstore,
		ssa.OpARM64FMOVSstore,
		ssa.OpARM64FMOVDstore,
		ssa.OpARM64STLRB,
		ssa.OpARM64STLR,
		ssa.OpARM64STLRW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstoreidx,
		ssa.OpARM64MOVHstoreidx,
		ssa.OpARM64MOVWstoreidx,
		ssa.OpARM64MOVDstoreidx,
		ssa.OpARM64FMOVSstoreidx,
		ssa.OpARM64FMOVDstoreidx,
		ssa.OpARM64MOVHstoreidx2,
		ssa.OpARM64MOVWstoreidx4,
		ssa.OpARM64FMOVSstoreidx4,
		ssa.OpARM64MOVDstoreidx8,
		ssa.OpARM64FMOVDstoreidx8:
		p := s.Prog(v.Op.Asm())
		p.To = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
	case ssa.OpARM64STP, ssa.OpARM64STPW, ssa.OpARM64FSTPD, ssa.OpARM64FSTPS:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = v.Args[1].Reg()
		p.From.Offset = int64(v.Args[2].Reg())
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64BFI,
		ssa.OpARM64BFXIL:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8
		p.AddRestSourceConst(v.AuxInt & 0xff)
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64SBFIZ,
		ssa.OpARM64SBFX,
		ssa.OpARM64UBFIZ,
		ssa.OpARM64UBFX:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8
		p.AddRestSourceConst(v.AuxInt & 0xff)
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredAtomicExchange64,
		ssa.OpARM64LoweredAtomicExchange32,
		ssa.OpARM64LoweredAtomicExchange8:
		// LDAXR	(Rarg0), Rout
		// STLXR	Rarg1, (Rarg0), Rtmp
		// CBNZ	Rtmp, -2(PC)
		var ld, st obj.As
		switch v.Op {
		case ssa.OpARM64LoweredAtomicExchange8:
			ld = arm64.ALDAXRB
			st = arm64.ASTLXRB
		case ssa.OpARM64LoweredAtomicExchange32:
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		case ssa.OpARM64LoweredAtomicExchange64:
			ld = arm64.ALDAXR
			st = arm64.ASTLXR
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(st)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = arm64.REGTMP
		p2 := s.Prog(arm64.ACBNZ)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = arm64.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p2.To.SetTarget(p)
	case ssa.OpARM64LoweredAtomicExchange64Variant,
		ssa.OpARM64LoweredAtomicExchange32Variant,
		ssa.OpARM64LoweredAtomicExchange8Variant:
		var swap obj.As
		switch v.Op {
		case ssa.OpARM64LoweredAtomicExchange8Variant:
			swap = arm64.ASWPALB
		case ssa.OpARM64LoweredAtomicExchange32Variant:
			swap = arm64.ASWPALW
		case ssa.OpARM64LoweredAtomicExchange64Variant:
			swap = arm64.ASWPALD
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()

		// SWPAL[BWD]	Rarg1, (Rarg0), Rout
		p := s.Prog(swap)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out

	case ssa.OpARM64LoweredAtomicAdd64,
		ssa.OpARM64LoweredAtomicAdd32:
		// LDAXR	(Rarg0), Rout
		// ADD	Rarg1, Rout
		// STLXR	Rout, (Rarg0), Rtmp
		// CBNZ	Rtmp, -3(PC)
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicAdd32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
		p2 := s.Prog(st)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = out
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
	case ssa.OpARM64LoweredAtomicAdd64Variant,
		ssa.OpARM64LoweredAtomicAdd32Variant:
		// LDADDAL	Rarg1, (Rarg0), Rout
		// ADD	Rarg1, Rout
		op := arm64.ALDADDALD
		if v.Op == ssa.OpARM64LoweredAtomicAdd32Variant {
			op = arm64.ALDADDALW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(op)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
	case ssa.OpARM64LoweredAtomicCas64,
		ssa.OpARM64LoweredAtomicCas32:
		// LDAXR	(Rarg0), Rtmp
		// CMP	Rarg1, Rtmp
		// BNE	3(PC)
		// STLXR	Rarg2, (Rarg0), Rtmp
		// CBNZ	Rtmp, -4(PC)
		// CSET	EQ, Rout
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		cmp := arm64.ACMP
		if v.Op == ssa.OpARM64LoweredAtomicCas32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
			cmp = arm64.ACMPW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p1 := s.Prog(cmp)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = arm64.REGTMP
		p2 := s.Prog(arm64.ABNE)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(st)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = r2
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = r0
		p3.RegTo2 = arm64.REGTMP
		p4 := s.Prog(arm64.ACBNZ)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = arm64.REGTMP
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p)
		p5 := s.Prog(arm64.ACSET)
		p5.From.Type = obj.TYPE_SPECIAL
		p5.From.Offset = int64(arm64.SPOP_EQ)
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = out
		p2.To.SetTarget(p5)
	case ssa.OpARM64LoweredAtomicCas64Variant,
		ssa.OpARM64LoweredAtomicCas32Variant:
		// Rarg0: ptr
		// Rarg1: old
		// Rarg2: new
		// MOV	Rarg1, Rtmp
		// CASAL	Rtmp, (Rarg0), Rarg2
		// CMP	Rarg1, Rtmp
		// CSET	EQ, Rout
		cas := arm64.ACASALD
		cmp := arm64.ACMP
		mov := arm64.AMOVD
		if v.Op == ssa.OpARM64LoweredAtomicCas32Variant {
			cas = arm64.ACASALW
			cmp = arm64.ACMPW
			mov = arm64.AMOVW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()

		// MOV	Rarg1, Rtmp
		p := s.Prog(mov)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP

		// CASAL	Rtmp, (Rarg0), Rarg2
		p1 := s.Prog(cas)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = arm64.REGTMP
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = r2

		// CMP	Rarg1, Rtmp
		p2 := s.Prog(cmp)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r1
		p2.Reg = arm64.REGTMP

		// CSET	EQ, Rout
		p3 := s.Prog(arm64.ACSET)
		p3.From.Type = obj.TYPE_SPECIAL
		p3.From.Offset = int64(arm64.SPOP_EQ)
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = out

	case ssa.OpARM64LoweredAtomicAnd64,
		ssa.OpARM64LoweredAtomicOr64,
		ssa.OpARM64LoweredAtomicAnd32,
		ssa.OpARM64LoweredAtomicOr32,
		ssa.OpARM64LoweredAtomicAnd8,
		ssa.OpARM64LoweredAtomicOr8:
		// LDAXR[BW]	(Rarg0), Rout
		// AND/OR	Rarg1, Rout, tmp
		// STLXR[BW]	tmp, (Rarg0), Rtmp
		// CBNZ	Rtmp, -3(PC)
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicAnd32 || v.Op == ssa.OpARM64LoweredAtomicOr32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		if v.Op == ssa.OpARM64LoweredAtomicAnd8 || v.Op == ssa.OpARM64LoweredAtomicOr8 {
			ld = arm64.ALDAXRB
			st = arm64.ASTLXRB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		tmp := v.RegTmp()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = out
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = tmp
		p2 := s.Prog(st)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = tmp
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)

	case ssa.OpARM64LoweredAtomicAnd8Variant,
		ssa.OpARM64LoweredAtomicAnd32Variant,
		ssa.OpARM64LoweredAtomicAnd64Variant:
		atomicClear := arm64.ALDCLRALD
		if v.Op == ssa.OpARM64LoweredAtomicAnd32Variant {
			atomicClear = arm64.ALDCLRALW
		}
		if v.Op == ssa.OpARM64LoweredAtomicAnd8Variant {
			atomicClear = arm64.ALDCLRALB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()

		// MVN	Rarg1, Rtmp
		p := s.Prog(arm64.AMVN)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP

		// LDCLRAL[BW]	Rtmp, (Rarg0), Rout
		p1 := s.Prog(atomicClear)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = arm64.REGTMP
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = out

	case ssa.OpARM64LoweredAtomicOr8Variant,
		ssa.OpARM64LoweredAtomicOr32Variant,
		ssa.OpARM64LoweredAtomicOr64Variant:
		atomicOr := arm64.ALDORALD
		if v.Op == ssa.OpARM64LoweredAtomicOr32Variant {
			atomicOr = arm64.ALDORALW
		}
		if v.Op == ssa.OpARM64LoweredAtomicOr8Variant {
			atomicOr = arm64.ALDORALB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()

		// LDORAL[BW]	Rarg1, (Rarg0), Rout
		p := s.Prog(atomicOr)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out

	case ssa.OpARM64MOVBreg,
		ssa.OpARM64MOVBUreg,
		ssa.OpARM64MOVHreg,
		ssa.OpARM64MOVHUreg,
		ssa.OpARM64MOVWreg,
		ssa.OpARM64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpARM64MOVDreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpARM64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(arm64.AMOVD)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpARM64MVN,
		ssa.OpARM64NEG,
		ssa.OpARM64FABSD,
		ssa.OpARM64FMOVDfpgp,
		ssa.OpARM64FMOVDgpfp,
		ssa.OpARM64FMOVSfpgp,
		ssa.OpARM64FMOVSgpfp,
		ssa.OpARM64FNEGS,
		ssa.OpARM64FNEGD,
		ssa.OpARM64FSQRTS,
		ssa.OpARM64FSQRTD,
		ssa.OpARM64FCVTZSSW,
		ssa.OpARM64FCVTZSDW,
		ssa.OpARM64FCVTZUSW,
		ssa.OpARM64FCVTZUDW,
		ssa.OpARM64FCVTZSS,
		ssa.OpARM64FCVTZSD,
		ssa.OpARM64FCVTZUS,
		ssa.OpARM64FCVTZUD,
		ssa.OpARM64SCVTFWS,
		ssa.OpARM64SCVTFWD,
		ssa.OpARM64SCVTFS,
		ssa.OpARM64SCVTFD,
		ssa.OpARM64UCVTFWS,
		ssa.OpARM64UCVTFWD,
		ssa.OpARM64UCVTFS,
		ssa.OpARM64UCVTFD,
		ssa.OpARM64FCVTSD,
		ssa.OpARM64FCVTDS,
		ssa.OpARM64REV,
		ssa.OpARM64REVW,
		ssa.OpARM64REV16,
		ssa.OpARM64REV16W,
		ssa.OpARM64RBIT,
		ssa.OpARM64RBITW,
		ssa.OpARM64CLZ,
		ssa.OpARM64CLZW,
		ssa.OpARM64FRINTAD,
		ssa.OpARM64FRINTMD,
		ssa.OpARM64FRINTND,
		ssa.OpARM64FRINTPD,
		ssa.OpARM64FRINTZD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredRound32F, ssa.OpARM64LoweredRound64F:
		// input is already rounded
	case ssa.OpARM64VCNT:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = (v.Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
	case ssa.OpARM64VUADDLV:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg() - arm64.REG_F0 + arm64.REG_V0
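	// Conditional-select ops: the condition to test is stored in AuxInt
	// as an SSA comparison op and translated through condBits.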
	case ssa.OpARM64CSEL, ssa.OpARM64CSEL0:
		r1 := int16(arm64.REGZERO)
		if v.Op != ssa.OpARM64CSEL0 {
			r1 = v.Args[1].Reg()
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.Reg = v.Args[0].Reg()
		p.AddRestSourceReg(r1)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64CSINC, ssa.OpARM64CSINV, ssa.OpARM64CSNEG:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.Reg = v.Args[0].Reg()
		p.AddRestSourceReg(v.Args[1].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64CSETM:
		p := s.Prog(arm64.ACSETM)
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64DUFFZERO:
		// runtime.duffzero expects the start address in R20.
		p := s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredZero:
		// STP.P	(ZR,ZR), 16(R16)
		// CMP	Rarg1, R16
		// BLE	-2(PC)
		// arg1 is the address of the last 16-byte unit to zero
		p := s.Prog(arm64.ASTP)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = arm64.REGZERO
		p.From.Offset = int64(arm64.REGZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REG_R16
		p.To.Offset = 16
		p2 := s.Prog(arm64.ACMP)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = arm64.REG_R16
		p3 := s.Prog(arm64.ABLE)
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
	case ssa.OpARM64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredMove:
		// LDP.P	16(R16), (R25, Rtmp)
		// STP.P	(R25, Rtmp), 16(R17)
		// CMP	Rarg2, R16
		// BLE	-3(PC)
		// arg2 is the address of the last element of src
		p := s.Prog(arm64.ALDP)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = arm64.REG_R16
		p.From.Offset = 16
		p.To.Type = obj.TYPE_REGREG
		p.To.Reg = arm64.REG_R25
		p.To.Offset = int64(arm64.REGTMP)
		p2 := s.Prog(arm64.ASTP)
		p2.Scond = arm64.C_XPOST
		p2.From.Type = obj.TYPE_REGREG
		p2.From.Reg = arm64.REG_R25
		p2.From.Offset = int64(arm64.REGTMP)
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = arm64.REG_R17
		p2.To.Offset = 16
		p3 := s.Prog(arm64.ACMP)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.Reg = arm64.REG_R16
		p4 := s.Prog(arm64.ABLE)
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p)
	case ssa.OpARM64CALLstatic, ssa.OpARM64CALLclosure, ssa.OpARM64CALLinter:
		s.Call(v)
	case ssa.OpARM64CALLtail:
		s.TailCall(v)
	case ssa.OpARM64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many buffer entries we need.
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]

	case ssa.OpARM64LoweredPanicBoundsRR, ssa.OpARM64LoweredPanicBoundsRC, ssa.OpARM64LoweredPanicBoundsCR, ssa.OpARM64LoweredPanicBoundsCC:
		// Compute the constant we put in the PCData entry for this call.
		code, signed := ssa.BoundsKind(v.AuxInt).Code()
		xIsReg := false
		yIsReg := false
		xVal := 0
		yVal := 0
		switch v.Op {
		case ssa.OpARM64LoweredPanicBoundsRR:
			xIsReg = true
			xVal = int(v.Args[0].Reg() - arm64.REG_R0)
			yIsReg = true
			yVal = int(v.Args[1].Reg() - arm64.REG_R0)
		case ssa.OpARM64LoweredPanicBoundsRC:
			xIsReg = true
			xVal = int(v.Args[0].Reg() - arm64.REG_R0)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
				// Move constant to a register.
				yIsReg = true
				if yVal == xVal {
					yVal = 1
				}
				p := s.Prog(arm64.AMOVD)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = arm64.REG_R0 + int16(yVal)
			}
		case ssa.OpARM64LoweredPanicBoundsCR:
			yIsReg = true
			yVal = int(v.Args[0].Reg() - arm64.REG_R0)
			c := v.Aux.(ssa.PanicBoundsC).C
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
				// Move constant to a register.
				xIsReg = true
				if xVal == yVal {
					xVal = 1
				}
				p := s.Prog(arm64.AMOVD)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = arm64.REG_R0 + int16(xVal)
			}
		case ssa.OpARM64LoweredPanicBoundsCC:
			c := v.Aux.(ssa.PanicBoundsCC).Cx
			if c >= 0 && c <= abi.BoundsMaxConst {
				xVal = int(c)
			} else {
				// Move constant to a register.
				xIsReg = true
				p := s.Prog(arm64.AMOVD)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = arm64.REG_R0 + int16(xVal)
			}
			c = v.Aux.(ssa.PanicBoundsCC).Cy
			if c >= 0 && c <= abi.BoundsMaxConst {
				yVal = int(c)
			} else {
				// Move constant to a register.
				yIsReg = true
				yVal = 1
				p := s.Prog(arm64.AMOVD)
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = c
				p.To.Type = obj.TYPE_REG
				p.To.Reg = arm64.REG_R0 + int16(yVal)
			}
		}
		c := abi.BoundsEncode(code, signed, xIsReg, yIsReg, xVal, yVal)

		p := s.Prog(obj.APCDATA)
		p.From.SetConst(abi.PCDATA_PanicBounds)
		p.To.SetConst(int64(c))
		p = s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.PanicBounds

	case ssa.OpARM64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(arm64.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpARM64Equal,
		ssa.OpARM64NotEqual,
		ssa.OpARM64LessThan,
		ssa.OpARM64LessEqual,
		ssa.OpARM64GreaterThan,
		ssa.OpARM64GreaterEqual,
		ssa.OpARM64LessThanU,
		ssa.OpARM64LessEqualU,
		ssa.OpARM64GreaterThanU,
		ssa.OpARM64GreaterEqualU,
		ssa.OpARM64LessThanF,
		ssa.OpARM64LessEqualF,
		ssa.OpARM64GreaterThanF,
		ssa.OpARM64GreaterEqualF,
		ssa.OpARM64NotLessThanF,
		ssa.OpARM64NotLessEqualF,
		ssa.OpARM64NotGreaterThanF,
		ssa.OpARM64NotGreaterEqualF,
		ssa.OpARM64LessThanNoov,
		ssa.OpARM64GreaterEqualNoov:
		// generate boolean values using CSET
		p := s.Prog(arm64.ACSET)
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[v.Op]
		p.From.Offset = int64(condCode)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64PRFM:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_CONST
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredGetClosurePtr:
		// Closure pointer is R26 (arm64.REGCTXT).
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpARM64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64DMB:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
	case ssa.OpARM64FlagConstant:
		v.Fatalf("FlagConstant op should never make it to codegen %v", v.LongString())
	case ssa.OpARM64InvertFlags:
		v.Fatalf("InvertFlags should never make it to codegen %v", v.LongString())
	case ssa.OpClobber:
		// MOVW	$0xdeaddead, REGTMP
		// MOVW	REGTMP, (slot)
		// MOVW	REGTMP, 4(slot)
		p := s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 0xdeaddead
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p = s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGTMP
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGSP
		ssagen.AddAux(&p.To, v)
		p = s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGTMP
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGSP
		ssagen.AddAux2(&p.To, v, v.AuxInt+4)
	case ssa.OpClobberReg:
		x := uint64(0xdeaddeaddeaddead)
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(x)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

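// condBits maps an SSA comparison op to the ARM64 condition code
// tested by conditional instructions such as CSET, CSEL, and CSETM.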
var condBits = map[ssa.Op]arm64.SpecialOperand{
	ssa.OpARM64Equal:         arm64.SPOP_EQ,
	ssa.OpARM64NotEqual:      arm64.SPOP_NE,
	ssa.OpARM64LessThan:      arm64.SPOP_LT,
	ssa.OpARM64LessThanU:     arm64.SPOP_LO,
	ssa.OpARM64LessEqual:     arm64.SPOP_LE,
	ssa.OpARM64LessEqualU:    arm64.SPOP_LS,
	ssa.OpARM64GreaterThan:   arm64.SPOP_GT,
	ssa.OpARM64GreaterThanU:  arm64.SPOP_HI,
	ssa.OpARM64GreaterEqual:  arm64.SPOP_GE,
	ssa.OpARM64GreaterEqualU: arm64.SPOP_HS,
	ssa.OpARM64LessThanF:     arm64.SPOP_MI, // less than
	ssa.OpARM64LessEqualF:    arm64.SPOP_LS, // less than or equal to
	ssa.OpARM64GreaterThanF:  arm64.SPOP_GT, // greater than
	ssa.OpARM64GreaterEqualF: arm64.SPOP_GE, // greater than or equal to

	// The following condition codes have unordered to handle comparisons related to NaN.
	ssa.OpARM64NotLessThanF:     arm64.SPOP_PL, // greater than, equal to, or unordered
	ssa.OpARM64NotLessEqualF:    arm64.SPOP_HI, // greater than or unordered
	ssa.OpARM64NotGreaterThanF:  arm64.SPOP_LE, // less than, equal to, or unordered
	ssa.OpARM64NotGreaterEqualF: arm64.SPOP_LT, // less than or unordered

	ssa.OpARM64LessThanNoov:     arm64.SPOP_MI, // less than but without honoring overflow
	ssa.OpARM64GreaterEqualNoov: arm64.SPOP_PL, // greater than or equal to but without honoring overflow
}

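// blockJump maps a conditional block kind to its branch instruction (asm)
// and the inverted branch (invasm) used when the block falls through to
// its first successor.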
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockARM64EQ:     {arm64.ABEQ, arm64.ABNE},
	ssa.BlockARM64NE:     {arm64.ABNE, arm64.ABEQ},
	ssa.BlockARM64LT:     {arm64.ABLT, arm64.ABGE},
	ssa.BlockARM64GE:     {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64LE:     {arm64.ABLE, arm64.ABGT},
	ssa.BlockARM64GT:     {arm64.ABGT, arm64.ABLE},
	ssa.BlockARM64ULT:    {arm64.ABLO, arm64.ABHS},
	ssa.BlockARM64UGE:    {arm64.ABHS, arm64.ABLO},
	ssa.BlockARM64UGT:    {arm64.ABHI, arm64.ABLS},
	ssa.BlockARM64ULE:    {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64Z:      {arm64.ACBZ, arm64.ACBNZ},
	ssa.BlockARM64NZ:     {arm64.ACBNZ, arm64.ACBZ},
	ssa.BlockARM64ZW:     {arm64.ACBZW, arm64.ACBNZW},
	ssa.BlockARM64NZW:    {arm64.ACBNZW, arm64.ACBZW},
	ssa.BlockARM64TBZ:    {arm64.ATBZ, arm64.ATBNZ},
	ssa.BlockARM64TBNZ:   {arm64.ATBNZ, arm64.ATBZ},
	ssa.BlockARM64FLT:    {arm64.ABMI, arm64.ABPL},
	ssa.BlockARM64FGE:    {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64FLE:    {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64FGT:    {arm64.ABGT, arm64.ABLE},
	ssa.BlockARM64LTnoov: {arm64.ABMI, arm64.ABPL},
	ssa.BlockARM64GEnoov: {arm64.ABPL, arm64.ABMI},
}

// To model a 'LEnoov' ('<=' without overflow checking) branching.
var leJumps = [2][2]ssagen.IndexJump{
	{{Jump: arm64.ABEQ, Index: 0}, {Jump: arm64.ABPL, Index: 1}}, // next == b.Succs[0]
	{{Jump: arm64.ABMI, Index: 0}, {Jump: arm64.ABEQ, Index: 0}}, // next == b.Succs[1]
}

// To model a 'GTnoov' ('>' without overflow checking) branching.
var gtJumps = [2][2]ssagen.IndexJump{
	{{Jump: arm64.ABMI, Index: 1}, {Jump: arm64.ABEQ, Index: 1}}, // next == b.Succs[0]
	{{Jump: arm64.ABEQ, Index: 1}, {Jump: arm64.ABPL, Index: 0}}, // next == b.Succs[1]
}

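// ssaGenBlock generates the control-flow instructions at the end of block b.
// next is the block laid out immediately after b, so a branch to next can be
// elided or inverted.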
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain, ssa.BlockDefer:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}

	case ssa.BlockExit, ssa.BlockRetJmp:

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockARM64EQ, ssa.BlockARM64NE,
		ssa.BlockARM64LT, ssa.BlockARM64GE,
		ssa.BlockARM64LE, ssa.BlockARM64GT,
		ssa.BlockARM64ULT, ssa.BlockARM64UGT,
		ssa.BlockARM64ULE, ssa.BlockARM64UGE,
		ssa.BlockARM64Z, ssa.BlockARM64NZ,
		ssa.BlockARM64ZW, ssa.BlockARM64NZW,
		ssa.BlockARM64FLT, ssa.BlockARM64FGE,
		ssa.BlockARM64FLE, ssa.BlockARM64FGT,
		ssa.BlockARM64LTnoov, ssa.BlockARM64GEnoov:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	case ssa.BlockARM64TBZ, ssa.BlockARM64TBNZ:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		p.From.Offset = b.AuxInt
		p.From.Type = obj.TYPE_CONST
		p.Reg = b.Controls[0].Reg()

	case ssa.BlockARM64LEnoov:
		s.CombJump(b, next, &leJumps)
	case ssa.BlockARM64GTnoov:
		s.CombJump(b, next, &gtJumps)

	case ssa.BlockARM64JUMPTABLE:
		// MOVD	(TABLE)(IDX<<3), Rtmp
		// JMP	(Rtmp)
		p := s.Prog(arm64.AMOVD)
		p.From = genIndexedOperand(ssa.OpARM64MOVDloadidx8, b.Controls[1].Reg(), b.Controls[0].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p = s.Prog(obj.AJMP)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGTMP
		// Save jump tables for later resolution of the target blocks.
		s.JumpTables = append(s.JumpTables, b)

	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}

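// loadRegResult generates a load of a function result value of type t
// from its stack slot (n+off) into register reg.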
func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p := s.Prog(loadByType(t))
	p.From.Type = obj.TYPE_MEM
	p.From.Name = obj.NAME_AUTO
	p.From.Sym = n.Linksym()
	p.From.Offset = n.FrameOffset() + off
	p.To.Type = obj.TYPE_REG
	p.To.Reg = reg
	return p
}

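// spillArgReg generates a store that spills register reg, holding the
// argument n (of type t), to its parameter stack slot.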
func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p = pp.Append(p, storeByType(t), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
	p.To.Name = obj.NAME_PARAM
	p.To.Sym = n.Linksym()
	p.Pos = p.Pos.WithNotStmt()
	return p
}