// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package arm64

import (
	"math"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/logopt"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/ssagen"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/obj/arm64"
)

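// loadByType returns the load instruction of the given type.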
func loadByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return arm64.AMOVB
			} else {
				return arm64.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return arm64.AMOVH
			} else {
				return arm64.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return arm64.AMOVW
			} else {
				return arm64.AMOVWU
			}
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad load type")
}

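// storeByType returns the store instruction of the given type.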
func storeByType(t *types.Type) obj.As {
	if t.IsFloat() {
		switch t.Size() {
		case 4:
			return arm64.AFMOVS
		case 8:
			return arm64.AFMOVD
		}
	} else {
		switch t.Size() {
		case 1:
			return arm64.AMOVB
		case 2:
			return arm64.AMOVH
		case 4:
			return arm64.AMOVW
		case 8:
			return arm64.AMOVD
		}
	}
	panic("bad store type")
}

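// makeshift encodes a register shifted by a constant, used as an Offset in Prog.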
func makeshift(v *ssa.Value, reg int16, typ int64, s int64) int64 {
	if s < 0 || s >= 64 {
		v.Fatalf("shift out of range: %d", s)
	}
	return int64(reg&31)<<16 | typ | (s&63)<<10
}

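// genshift generates a Prog for r = r0 op (r1 shifted by n).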
func genshift(s *ssagen.State, v *ssa.Value, as obj.As, r0, r1, r int16, typ int64, n int64) *obj.Prog {
	p := s.Prog(as)
	p.From.Type = obj.TYPE_SHIFT
	p.From.Offset = makeshift(v, r1, typ, n)
	p.Reg = r0
	if r != 0 {
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	}
	return p
}

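// genIndexedOperand generates a memory operand for the indexed load/store instructions.
// base and idx are registers.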
func genIndexedOperand(op ssa.Op, base, idx int16) obj.Addr {
	// Reg: base register, Index: (shifted) index register
	mop := obj.Addr{Type: obj.TYPE_MEM, Reg: base}
	switch op {
	case ssa.OpARM64MOVDloadidx8, ssa.OpARM64MOVDstoreidx8, ssa.OpARM64MOVDstorezeroidx8,
		ssa.OpARM64FMOVDloadidx8, ssa.OpARM64FMOVDstoreidx8:
		mop.Index = arm64.REG_LSL | 3<<5 | idx&31
	case ssa.OpARM64MOVWloadidx4, ssa.OpARM64MOVWUloadidx4, ssa.OpARM64MOVWstoreidx4, ssa.OpARM64MOVWstorezeroidx4,
		ssa.OpARM64FMOVSloadidx4, ssa.OpARM64FMOVSstoreidx4:
		mop.Index = arm64.REG_LSL | 2<<5 | idx&31
	case ssa.OpARM64MOVHloadidx2, ssa.OpARM64MOVHUloadidx2, ssa.OpARM64MOVHstoreidx2, ssa.OpARM64MOVHstorezeroidx2:
		mop.Index = arm64.REG_LSL | 1<<5 | idx&31
	default: // not shifted
		mop.Index = idx
	}
	return mop
}

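// ssaGenValue emits the machine instructions for a single SSA value.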
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpARM64MOVDreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := arm64.AMOVD
		if v.Type.IsFloat() {
			switch v.Type.Size() {
			case 4:
				as = arm64.AFMOVS
			case 8:
				as = arm64.AFMOVD
			default:
				panic("bad float size")
			}
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
	case ssa.OpARM64MOVDnop:
		// nothing to do
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(loadByType(v.Type))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		p := s.Prog(storeByType(v.Type))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
		// The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill
		// The loop only runs once.
		for _, a := range v.Block.Func.RegArgs {
			// Pass the spill/unspill information along to the assembler, offset by size of
			// the saved LR slot.
			addr := ssagen.SpillSlotAddr(a, arm64.REGSP, base.Ctxt.Arch.FixedFrameSize)
			s.FuncInfo().AddSpill(
				obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
		}
		v.Block.Func.RegArgs = nil
		ssagen.CheckArgReg(v)
	case ssa.OpARM64ADD,
		ssa.OpARM64SUB,
		ssa.OpARM64AND,
		ssa.OpARM64OR,
		ssa.OpARM64XOR,
		ssa.OpARM64BIC,
		ssa.OpARM64EON,
		ssa.OpARM64ORN,
		ssa.OpARM64MUL,
		ssa.OpARM64MULW,
		ssa.OpARM64MNEG,
		ssa.OpARM64MNEGW,
		ssa.OpARM64MULH,
		ssa.OpARM64UMULH,
		ssa.OpARM64MULL,
		ssa.OpARM64UMULL,
		ssa.OpARM64DIV,
		ssa.OpARM64UDIV,
		ssa.OpARM64DIVW,
		ssa.OpARM64UDIVW,
		ssa.OpARM64MOD,
		ssa.OpARM64UMOD,
		ssa.OpARM64MODW,
		ssa.OpARM64UMODW,
		ssa.OpARM64SLL,
		ssa.OpARM64SRL,
		ssa.OpARM64SRA,
		ssa.OpARM64FADDS,
		ssa.OpARM64FADDD,
		ssa.OpARM64FSUBS,
		ssa.OpARM64FSUBD,
		ssa.OpARM64FMULS,
		ssa.OpARM64FMULD,
		ssa.OpARM64FNMULS,
		ssa.OpARM64FNMULD,
		ssa.OpARM64FDIVS,
		ssa.OpARM64FDIVD,
		ssa.OpARM64FMINS,
		ssa.OpARM64FMIND,
		ssa.OpARM64FMAXS,
		ssa.OpARM64FMAXD,
		ssa.OpARM64ROR,
		ssa.OpARM64RORW:
		r := v.Reg()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64FMADDS,
		ssa.OpARM64FMADDD,
		ssa.OpARM64FNMADDS,
		ssa.OpARM64FNMADDD,
		ssa.OpARM64FMSUBS,
		ssa.OpARM64FMSUBD,
		ssa.OpARM64FNMSUBS,
		ssa.OpARM64FNMSUBD,
		ssa.OpARM64MADD,
		ssa.OpARM64MADDW,
		ssa.OpARM64MSUB,
		ssa.OpARM64MSUBW:
		rt := v.Reg()
		ra := v.Args[0].Reg()
		rm := v.Args[1].Reg()
		rn := v.Args[2].Reg()
		p := s.Prog(v.Op.Asm())
		p.Reg = ra
		p.From.Type = obj.TYPE_REG
		p.From.Reg = rm
		p.AddRestSourceReg(rn)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = rt
	case ssa.OpARM64ADDconst,
		ssa.OpARM64SUBconst,
		ssa.OpARM64ANDconst,
		ssa.OpARM64ORconst,
		ssa.OpARM64XORconst,
		ssa.OpARM64SLLconst,
		ssa.OpARM64SRLconst,
		ssa.OpARM64SRAconst,
		ssa.OpARM64RORconst,
		ssa.OpARM64RORWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADDSconstflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64ADCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64ADCSflags,
		ssa.OpARM64ADDSflags,
		ssa.OpARM64SBCSflags,
		ssa.OpARM64SUBSflags:
		r := v.Reg0()
		r1 := v.Args[0].Reg()
		r2 := v.Args[1].Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r2
		p.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
	case ssa.OpARM64NEGSflags:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64NGCzerocarry:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64EXTRconst,
		ssa.OpARM64EXTRWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.AddRestSourceReg(v.Args[0].Reg())
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64MVNshiftLL, ssa.OpARM64NEGshiftLL:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64MVNshiftRL, ssa.OpARM64NEGshiftRL:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64MVNshiftRA, ssa.OpARM64NEGshiftRA:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64MVNshiftRO:
		genshift(s, v, v.Op.Asm(), 0, v.Args[0].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64ADDshiftLL,
		ssa.OpARM64SUBshiftLL,
		ssa.OpARM64ANDshiftLL,
		ssa.OpARM64ORshiftLL,
		ssa.OpARM64XORshiftLL,
		ssa.OpARM64EONshiftLL,
		ssa.OpARM64ORNshiftLL,
		ssa.OpARM64BICshiftLL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64ADDshiftRL,
		ssa.OpARM64SUBshiftRL,
		ssa.OpARM64ANDshiftRL,
		ssa.OpARM64ORshiftRL,
		ssa.OpARM64XORshiftRL,
		ssa.OpARM64EONshiftRL,
		ssa.OpARM64ORNshiftRL,
		ssa.OpARM64BICshiftRL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64ADDshiftRA,
		ssa.OpARM64SUBshiftRA,
		ssa.OpARM64ANDshiftRA,
		ssa.OpARM64ORshiftRA,
		ssa.OpARM64XORshiftRA,
		ssa.OpARM64EONshiftRA,
		ssa.OpARM64ORNshiftRA,
		ssa.OpARM64BICshiftRA:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64ANDshiftRO,
		ssa.OpARM64ORshiftRO,
		ssa.OpARM64XORshiftRO,
		ssa.OpARM64EONshiftRO,
		ssa.OpARM64ORNshiftRO,
		ssa.OpARM64BICshiftRO:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), v.Reg(), arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FMOVSconst,
		ssa.OpARM64FMOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64FCMPS0,
		ssa.OpARM64FCMPD0:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(0)
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMP,
		ssa.OpARM64CMPW,
		ssa.OpARM64CMN,
		ssa.OpARM64CMNW,
		ssa.OpARM64TST,
		ssa.OpARM64TSTW,
		ssa.OpARM64FCMPS,
		ssa.OpARM64FCMPD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPconst,
		ssa.OpARM64CMPWconst,
		ssa.OpARM64CMNconst,
		ssa.OpARM64CMNWconst,
		ssa.OpARM64TSTconst,
		ssa.OpARM64TSTWconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
	case ssa.OpARM64CMPshiftLL, ssa.OpARM64CMNshiftLL, ssa.OpARM64TSTshiftLL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LL, v.AuxInt)
	case ssa.OpARM64CMPshiftRL, ssa.OpARM64CMNshiftRL, ssa.OpARM64TSTshiftRL:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_LR, v.AuxInt)
	case ssa.OpARM64CMPshiftRA, ssa.OpARM64CMNshiftRA, ssa.OpARM64TSTshiftRA:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_AR, v.AuxInt)
	case ssa.OpARM64TSTshiftRO:
		genshift(s, v, v.Op.Asm(), v.Args[0].Reg(), v.Args[1].Reg(), 0, arm64.SHIFT_ROR, v.AuxInt)
	case ssa.OpARM64MOVDaddr:
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()

		var wantreg string
		// MOVD $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP
		//   when constant is large, tmp register (REGTMP) may be used
		// - base is SB: load external address from constant pool (use relocation)
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVD $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
	case ssa.OpARM64MOVBload,
		ssa.OpARM64MOVBUload,
		ssa.OpARM64MOVHload,
		ssa.OpARM64MOVHUload,
		ssa.OpARM64MOVWload,
		ssa.OpARM64MOVWUload,
		ssa.OpARM64MOVDload,
		ssa.OpARM64FMOVSload,
		ssa.OpARM64FMOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LDP:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REGREG
		p.To.Reg = v.Reg0()
		p.To.Offset = int64(v.Reg1())
	case ssa.OpARM64MOVBloadidx,
		ssa.OpARM64MOVBUloadidx,
		ssa.OpARM64MOVHloadidx,
		ssa.OpARM64MOVHUloadidx,
		ssa.OpARM64MOVWloadidx,
		ssa.OpARM64MOVWUloadidx,
		ssa.OpARM64MOVDloadidx,
		ssa.OpARM64FMOVSloadidx,
		ssa.OpARM64FMOVDloadidx,
		ssa.OpARM64MOVHloadidx2,
		ssa.OpARM64MOVHUloadidx2,
		ssa.OpARM64MOVWloadidx4,
		ssa.OpARM64MOVWUloadidx4,
		ssa.OpARM64MOVDloadidx8,
		ssa.OpARM64FMOVDloadidx8,
		ssa.OpARM64FMOVSloadidx4:
		p := s.Prog(v.Op.Asm())
		p.From = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LDAR,
		ssa.OpARM64LDARB,
		ssa.OpARM64LDARW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
	case ssa.OpARM64MOVBstore,
		ssa.OpARM64MOVHstore,
		ssa.OpARM64MOVWstore,
		ssa.OpARM64MOVDstore,
		ssa.OpARM64FMOVSstore,
		ssa.OpARM64FMOVDstore,
		ssa.OpARM64STLRB,
		ssa.OpARM64STLR,
		ssa.OpARM64STLRW:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstoreidx,
		ssa.OpARM64MOVHstoreidx,
		ssa.OpARM64MOVWstoreidx,
		ssa.OpARM64MOVDstoreidx,
		ssa.OpARM64FMOVSstoreidx,
		ssa.OpARM64FMOVDstoreidx,
		ssa.OpARM64MOVHstoreidx2,
		ssa.OpARM64MOVWstoreidx4,
		ssa.OpARM64FMOVSstoreidx4,
		ssa.OpARM64MOVDstoreidx8,
		ssa.OpARM64FMOVDstoreidx8:
		p := s.Prog(v.Op.Asm())
		p.To = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[2].Reg()
	case ssa.OpARM64STP:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = v.Args[1].Reg()
		p.From.Offset = int64(v.Args[2].Reg())
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstorezero,
		ssa.OpARM64MOVHstorezero,
		ssa.OpARM64MOVWstorezero,
		ssa.OpARM64MOVDstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64MOVBstorezeroidx,
		ssa.OpARM64MOVHstorezeroidx,
		ssa.OpARM64MOVWstorezeroidx,
		ssa.OpARM64MOVDstorezeroidx,
		ssa.OpARM64MOVHstorezeroidx2,
		ssa.OpARM64MOVWstorezeroidx4,
		ssa.OpARM64MOVDstorezeroidx8:
		p := s.Prog(v.Op.Asm())
		p.To = genIndexedOperand(v.Op, v.Args[0].Reg(), v.Args[1].Reg())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGZERO
	case ssa.OpARM64MOVQstorezero:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = arm64.REGZERO
		p.From.Offset = int64(arm64.REGZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpARM64BFI,
		ssa.OpARM64BFXIL:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8
		p.AddRestSourceConst(v.AuxInt & 0xff)
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64SBFIZ,
		ssa.OpARM64SBFX,
		ssa.OpARM64UBFIZ,
		ssa.OpARM64UBFX:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt >> 8
		p.AddRestSourceConst(v.AuxInt & 0xff)
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredAtomicExchange64,
		ssa.OpARM64LoweredAtomicExchange32:
		// LDAXR	(Rarg0), Rout
		// STLXR	Rarg1, (Rarg0), Rtmp
		// CBNZ	Rtmp, -2(PC)
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicExchange32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(st)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = arm64.REGTMP
		p2 := s.Prog(arm64.ACBNZ)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = arm64.REGTMP
		p2.To.Type = obj.TYPE_BRANCH
		p2.To.SetTarget(p)
	case ssa.OpARM64LoweredAtomicExchange64Variant,
		ssa.OpARM64LoweredAtomicExchange32Variant:
		swap := arm64.ASWPALD
		if v.Op == ssa.OpARM64LoweredAtomicExchange32Variant {
			swap = arm64.ASWPALW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()

		// SWPALD/SWPALW	Rarg1, (Rarg0), Rout
		p := s.Prog(swap)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out

	case ssa.OpARM64LoweredAtomicAdd64,
		ssa.OpARM64LoweredAtomicAdd32:
		// LDAXR	(Rarg0), Rout
		// ADD	Rarg1, Rout
		// STLXR	Rout, (Rarg0), Rtmp
		// CBNZ	Rtmp, -3(PC)
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		if v.Op == ssa.OpARM64LoweredAtomicAdd32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
		p2 := s.Prog(st)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = out
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
	case ssa.OpARM64LoweredAtomicAdd64Variant,
		ssa.OpARM64LoweredAtomicAdd32Variant:
		// LDADDAL	Rarg1, (Rarg0), Rout
		// ADD	Rarg1, Rout
		op := arm64.ALDADDALD
		if v.Op == ssa.OpARM64LoweredAtomicAdd32Variant {
			op = arm64.ALDADDALW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(op)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out
		p1 := s.Prog(arm64.AADD)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
	case ssa.OpARM64LoweredAtomicCas64,
		ssa.OpARM64LoweredAtomicCas32:
		// LDAXR	(Rarg0), Rtmp
		// CMP	Rarg1, Rtmp
		// BNE	3(PC)
		// STLXR	Rarg2, (Rarg0), Rtmp
		// CBNZ	Rtmp, -4(PC)
		// CSET	EQ, Rout
		ld := arm64.ALDAXR
		st := arm64.ASTLXR
		cmp := arm64.ACMP
		if v.Op == ssa.OpARM64LoweredAtomicCas32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
			cmp = arm64.ACMPW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p1 := s.Prog(cmp)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.Reg = arm64.REGTMP
		p2 := s.Prog(arm64.ABNE)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(st)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = r2
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = r0
		p3.RegTo2 = arm64.REGTMP
		p4 := s.Prog(arm64.ACBNZ)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = arm64.REGTMP
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p)
		p5 := s.Prog(arm64.ACSET)
		p5.From.Type = obj.TYPE_SPECIAL
		p5.From.Offset = int64(arm64.SPOP_EQ)
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = out
		p2.To.SetTarget(p5)
	case ssa.OpARM64LoweredAtomicCas64Variant,
		ssa.OpARM64LoweredAtomicCas32Variant:
		// MOV	Rarg1, Rtmp
		// CASAL	Rtmp, (Rarg0), Rarg2
		// CMP	Rarg1, Rtmp
		// CSET	EQ, Rout
		cas := arm64.ACASALD
		cmp := arm64.ACMP
		mov := arm64.AMOVD
		if v.Op == ssa.OpARM64LoweredAtomicCas32Variant {
			cas = arm64.ACASALW
			cmp = arm64.ACMPW
			mov = arm64.AMOVW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		r2 := v.Args[2].Reg()
		out := v.Reg0()

		// MOV	Rarg1, Rtmp
		p := s.Prog(mov)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP

		// CASAL	Rtmp, (Rarg0), Rarg2
		p1 := s.Prog(cas)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = arm64.REGTMP
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = r2

		// CMP	Rarg1, Rtmp
		p2 := s.Prog(cmp)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r1
		p2.Reg = arm64.REGTMP

		// CSET	EQ, Rout
		p3 := s.Prog(arm64.ACSET)
		p3.From.Type = obj.TYPE_SPECIAL
		p3.From.Offset = int64(arm64.SPOP_EQ)
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = out

	case ssa.OpARM64LoweredAtomicAnd8,
		ssa.OpARM64LoweredAtomicAnd32,
		ssa.OpARM64LoweredAtomicOr8,
		ssa.OpARM64LoweredAtomicOr32:
		// LDAXRB/LDAXRW	(Rarg0), Rout
		// AND/OR	Rarg1, Rout
		// STLXRB/STLXRW	Rout, (Rarg0), Rtmp
		// CBNZ	Rtmp, -3(PC)
		ld := arm64.ALDAXRB
		st := arm64.ASTLXRB
		if v.Op == ssa.OpARM64LoweredAtomicAnd32 || v.Op == ssa.OpARM64LoweredAtomicOr32 {
			ld = arm64.ALDAXRW
			st = arm64.ASTLXRW
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()
		p := s.Prog(ld)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = r0
		p.To.Type = obj.TYPE_REG
		p.To.Reg = out
		p1 := s.Prog(v.Op.Asm())
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = r1
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = out
		p2 := s.Prog(st)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = out
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = r0
		p2.RegTo2 = arm64.REGTMP
		p3 := s.Prog(arm64.ACBNZ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = arm64.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
	case ssa.OpARM64LoweredAtomicAnd8Variant,
		ssa.OpARM64LoweredAtomicAnd32Variant:
		atomic_clear := arm64.ALDCLRALW
		if v.Op == ssa.OpARM64LoweredAtomicAnd8Variant {
			atomic_clear = arm64.ALDCLRALB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()

		// MVN	Rarg1, Rtmp
		p := s.Prog(arm64.AMVN)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP

		// LDCLRALW/LDCLRALB	Rtmp, (Rarg0), Rout
		p1 := s.Prog(atomic_clear)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = arm64.REGTMP
		p1.To.Type = obj.TYPE_MEM
		p1.To.Reg = r0
		p1.RegTo2 = out

		// AND	Rarg1, Rout (the atomic op returned the old value; compute the new one)
		p2 := s.Prog(arm64.AAND)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r1
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = out

	case ssa.OpARM64LoweredAtomicOr8Variant,
		ssa.OpARM64LoweredAtomicOr32Variant:
		atomic_or := arm64.ALDORALW
		if v.Op == ssa.OpARM64LoweredAtomicOr8Variant {
			atomic_or = arm64.ALDORALB
		}
		r0 := v.Args[0].Reg()
		r1 := v.Args[1].Reg()
		out := v.Reg0()

		// LDORALW/LDORALB	Rarg1, (Rarg0), Rout
		p := s.Prog(atomic_or)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r1
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = r0
		p.RegTo2 = out

		// ORR	Rarg1, Rout (the atomic op returned the old value; compute the new one)
		p2 := s.Prog(arm64.AORR)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = r1
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = out

	case ssa.OpARM64MOVBreg,
		ssa.OpARM64MOVBUreg,
		ssa.OpARM64MOVHreg,
		ssa.OpARM64MOVHUreg,
		ssa.OpARM64MOVWreg,
		ssa.OpARM64MOVWUreg:
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpARM64MOVDreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpARM64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpARM64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpARM64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign extended, don't extend again.
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(arm64.AMOVD)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpARM64MVN,
		ssa.OpARM64NEG,
		ssa.OpARM64FABSD,
		ssa.OpARM64FMOVDfpgp,
		ssa.OpARM64FMOVDgpfp,
		ssa.OpARM64FMOVSfpgp,
		ssa.OpARM64FMOVSgpfp,
		ssa.OpARM64FNEGS,
		ssa.OpARM64FNEGD,
		ssa.OpARM64FSQRTS,
		ssa.OpARM64FSQRTD,
		ssa.OpARM64FCVTZSSW,
		ssa.OpARM64FCVTZSDW,
		ssa.OpARM64FCVTZUSW,
		ssa.OpARM64FCVTZUDW,
		ssa.OpARM64FCVTZSS,
		ssa.OpARM64FCVTZSD,
		ssa.OpARM64FCVTZUS,
		ssa.OpARM64FCVTZUD,
		ssa.OpARM64SCVTFWS,
		ssa.OpARM64SCVTFWD,
		ssa.OpARM64SCVTFS,
		ssa.OpARM64SCVTFD,
		ssa.OpARM64UCVTFWS,
		ssa.OpARM64UCVTFWD,
		ssa.OpARM64UCVTFS,
		ssa.OpARM64UCVTFD,
		ssa.OpARM64FCVTSD,
		ssa.OpARM64FCVTDS,
		ssa.OpARM64REV,
		ssa.OpARM64REVW,
		ssa.OpARM64REV16,
		ssa.OpARM64REV16W,
		ssa.OpARM64RBIT,
		ssa.OpARM64RBITW,
		ssa.OpARM64CLZ,
		ssa.OpARM64CLZW,
		ssa.OpARM64FRINTAD,
		ssa.OpARM64FRINTMD,
		ssa.OpARM64FRINTND,
		ssa.OpARM64FRINTPD,
		ssa.OpARM64FRINTZD:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredRound32F, ssa.OpARM64LoweredRound64F:
		// input is already rounded
	case ssa.OpARM64VCNT:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = (v.Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
	case ssa.OpARM64VUADDLV:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = (v.Args[0].Reg()-arm64.REG_F0)&31 + arm64.REG_ARNG + ((arm64.ARNG_8B & 15) << 5)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg() - arm64.REG_F0 + arm64.REG_V0
	case ssa.OpARM64CSEL, ssa.OpARM64CSEL0:
		r1 := int16(arm64.REGZERO)
		if v.Op != ssa.OpARM64CSEL0 {
			r1 = v.Args[1].Reg()
		}
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.Reg = v.Args[0].Reg()
		p.AddRestSourceReg(r1)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64CSINC, ssa.OpARM64CSINV, ssa.OpARM64CSNEG:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.Reg = v.Args[0].Reg()
		p.AddRestSourceReg(v.Args[1].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64CSETM:
		p := s.Prog(arm64.ACSETM)
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[ssa.Op(v.AuxInt)]
		p.From.Offset = int64(condCode)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64DUFFZERO:
		// runtime.duffzero expects start address in R20
		p := s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredZero:
		// STP.P	(ZR,ZR), 16(R16)
		// CMP	Rarg1, R16
		// BLE	-2(PC)
		// arg1 is the address of the last 16-byte unit to zero
		p := s.Prog(arm64.ASTP)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_REGREG
		p.From.Reg = arm64.REGZERO
		p.From.Offset = int64(arm64.REGZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REG_R16
		p.To.Offset = 16
		p2 := s.Prog(arm64.ACMP)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = arm64.REG_R16
		p3 := s.Prog(arm64.ABLE)
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p)
	case ssa.OpARM64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredMove:
		// LDP.P	16(R16), (R25, Rtmp)
		// STP.P	(R25, Rtmp), 16(R17)
		// CMP	Rarg2, R16
		// BLE	-3(PC)
		// arg2 is the address of the last element of src
		p := s.Prog(arm64.ALDP)
		p.Scond = arm64.C_XPOST
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = arm64.REG_R16
		p.From.Offset = 16
		p.To.Type = obj.TYPE_REGREG
		p.To.Reg = arm64.REG_R25
		p.To.Offset = int64(arm64.REGTMP)
		p2 := s.Prog(arm64.ASTP)
		p2.Scond = arm64.C_XPOST
		p2.From.Type = obj.TYPE_REGREG
		p2.From.Reg = arm64.REG_R25
		p2.From.Offset = int64(arm64.REGTMP)
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = arm64.REG_R17
		p2.To.Offset = 16
		p3 := s.Prog(arm64.ACMP)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.Reg = arm64.REG_R16
		p4 := s.Prog(arm64.ABLE)
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p)
	case ssa.OpARM64CALLstatic, ssa.OpARM64CALLclosure, ssa.OpARM64CALLinter:
		s.Call(v)
	case ssa.OpARM64CALLtail:
		s.TailCall(v)
	case ssa.OpARM64LoweredWB:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		// AuxInt encodes how many buffer entries we need.
		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]

	case ssa.OpARM64LoweredPanicBoundsA, ssa.OpARM64LoweredPanicBoundsB, ssa.OpARM64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16)
	case ssa.OpARM64LoweredNilCheck:
		// Issue a load which will fault if arg is nil.
		p := s.Prog(arm64.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 {
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpARM64Equal,
		ssa.OpARM64NotEqual,
		ssa.OpARM64LessThan,
		ssa.OpARM64LessEqual,
		ssa.OpARM64GreaterThan,
		ssa.OpARM64GreaterEqual,
		ssa.OpARM64LessThanU,
		ssa.OpARM64LessEqualU,
		ssa.OpARM64GreaterThanU,
		ssa.OpARM64GreaterEqualU,
		ssa.OpARM64LessThanF,
		ssa.OpARM64LessEqualF,
		ssa.OpARM64GreaterThanF,
		ssa.OpARM64GreaterEqualF,
		ssa.OpARM64NotLessThanF,
		ssa.OpARM64NotLessEqualF,
		ssa.OpARM64NotGreaterThanF,
		ssa.OpARM64NotGreaterEqualF,
		ssa.OpARM64LessThanNoov,
		ssa.OpARM64GreaterEqualNoov:
		// generate boolean values using CSET
		p := s.Prog(arm64.ACSET)
		p.From.Type = obj.TYPE_SPECIAL
		condCode := condBits[v.Op]
		p.From.Offset = int64(condCode)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64PRFM:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_CONST
		p.To.Offset = v.AuxInt
	case ssa.OpARM64LoweredGetClosurePtr:
		// Closure pointer is R26 (arm64.REGCTXT).
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpARM64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpARM64DMB:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
	case ssa.OpARM64FlagConstant:
		v.Fatalf("FlagConstant op should never make it to codegen %v", v.LongString())
	case ssa.OpARM64InvertFlags:
		v.Fatalf("InvertFlags should never make it to codegen %v", v.LongString())
	case ssa.OpClobber:
		// MOVW	$0xdeaddead, REGTMP
		// MOVW	REGTMP, (slot)
		// MOVW	REGTMP, 4(slot)
		p := s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 0xdeaddead
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p = s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGTMP
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGSP
		ssagen.AddAux(&p.To, v)
		p = s.Prog(arm64.AMOVW)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = arm64.REGTMP
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGSP
		ssagen.AddAux2(&p.To, v, v.AuxInt+4)
	case ssa.OpClobberReg:
		x := uint64(0xdeaddeaddeaddead)
		p := s.Prog(arm64.AMOVD)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(x)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}

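// condBits maps an SSA comparison op to the ARM64 condition code used when
// materializing its result with CSET/CSEL and friends.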
var condBits = map[ssa.Op]arm64.SpecialOperand{
	ssa.OpARM64Equal:         arm64.SPOP_EQ,
	ssa.OpARM64NotEqual:      arm64.SPOP_NE,
	ssa.OpARM64LessThan:      arm64.SPOP_LT,
	ssa.OpARM64LessThanU:     arm64.SPOP_LO,
	ssa.OpARM64LessEqual:     arm64.SPOP_LE,
	ssa.OpARM64LessEqualU:    arm64.SPOP_LS,
	ssa.OpARM64GreaterThan:   arm64.SPOP_GT,
	ssa.OpARM64GreaterThanU:  arm64.SPOP_HI,
	ssa.OpARM64GreaterEqual:  arm64.SPOP_GE,
	ssa.OpARM64GreaterEqualU: arm64.SPOP_HS,
	ssa.OpARM64LessThanF:     arm64.SPOP_MI, // x < y
	ssa.OpARM64LessEqualF:    arm64.SPOP_LS, // x <= y
	ssa.OpARM64GreaterThanF:  arm64.SPOP_GT, // x > y
	ssa.OpARM64GreaterEqualF: arm64.SPOP_GE, // x >= y

	// Conditions chosen so they are also true when the operands are unordered (NaN).
	ssa.OpARM64NotLessThanF:     arm64.SPOP_PL, // x >= y || x is NaN
	ssa.OpARM64NotLessEqualF:    arm64.SPOP_HI, // x > y || x is NaN
	ssa.OpARM64NotGreaterThanF:  arm64.SPOP_LE, // x <= y || x is NaN
	ssa.OpARM64NotGreaterEqualF: arm64.SPOP_LT, // x < y || x is NaN

	ssa.OpARM64LessThanNoov:     arm64.SPOP_MI, // x < y, disregarding overflow
	ssa.OpARM64GreaterEqualNoov: arm64.SPOP_PL, // x >= y, disregarding overflow
}

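// blockJump gives, for each conditional block kind, the branch instruction to
// its first successor and the inverted branch to its second successor.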
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockARM64EQ:     {arm64.ABEQ, arm64.ABNE},
	ssa.BlockARM64NE:     {arm64.ABNE, arm64.ABEQ},
	ssa.BlockARM64LT:     {arm64.ABLT, arm64.ABGE},
	ssa.BlockARM64GE:     {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64LE:     {arm64.ABLE, arm64.ABGT},
	ssa.BlockARM64GT:     {arm64.ABGT, arm64.ABLE},
	ssa.BlockARM64ULT:    {arm64.ABLO, arm64.ABHS},
	ssa.BlockARM64UGE:    {arm64.ABHS, arm64.ABLO},
	ssa.BlockARM64UGT:    {arm64.ABHI, arm64.ABLS},
	ssa.BlockARM64ULE:    {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64Z:      {arm64.ACBZ, arm64.ACBNZ},
	ssa.BlockARM64NZ:     {arm64.ACBNZ, arm64.ACBZ},
	ssa.BlockARM64ZW:     {arm64.ACBZW, arm64.ACBNZW},
	ssa.BlockARM64NZW:    {arm64.ACBNZW, arm64.ACBZW},
	ssa.BlockARM64TBZ:    {arm64.ATBZ, arm64.ATBNZ},
	ssa.BlockARM64TBNZ:   {arm64.ATBNZ, arm64.ATBZ},
	ssa.BlockARM64FLT:    {arm64.ABMI, arm64.ABPL},
	ssa.BlockARM64FGE:    {arm64.ABGE, arm64.ABLT},
	ssa.BlockARM64FLE:    {arm64.ABLS, arm64.ABHI},
	ssa.BlockARM64FGT:    {arm64.ABGT, arm64.ABLE},
	ssa.BlockARM64LTnoov: {arm64.ABMI, arm64.ABPL},
	ssa.BlockARM64GEnoov: {arm64.ABPL, arm64.ABMI},
}

// To model a 'LEnoov' ('<=' without overflow checking) branching.
var leJumps = [2][2]ssagen.IndexJump{
	{{Jump: arm64.ABEQ, Index: 0}, {Jump: arm64.ABPL, Index: 1}}, // next == b.Succs[0]
	{{Jump: arm64.ABMI, Index: 0}, {Jump: arm64.ABEQ, Index: 0}}, // next == b.Succs[1]
}

// To model a 'GTnoov' ('>' without overflow checking) branching.
var gtJumps = [2][2]ssagen.IndexJump{
	{{Jump: arm64.ABMI, Index: 1}, {Jump: arm64.ABEQ, Index: 1}}, // next == b.Succs[0]
	{{Jump: arm64.ABEQ, Index: 1}, {Jump: arm64.ABPL, Index: 0}}, // next == b.Succs[1]
}

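// ssaGenBlock emits the control-flow instructions at the end of block b.
// next is the block that will be laid out immediately after b, so a jump
// to it can be omitted.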
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}

	case ssa.BlockDefer:
		// defer returns in R0:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(arm64.ACMP)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 0
		p.Reg = arm64.REG_R0
		p = s.Prog(arm64.ABNE)
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}

	case ssa.BlockExit, ssa.BlockRetJmp:

	case ssa.BlockRet:
		s.Prog(obj.ARET)

	case ssa.BlockARM64EQ, ssa.BlockARM64NE,
		ssa.BlockARM64LT, ssa.BlockARM64GE,
		ssa.BlockARM64LE, ssa.BlockARM64GT,
		ssa.BlockARM64ULT, ssa.BlockARM64UGT,
		ssa.BlockARM64ULE, ssa.BlockARM64UGE,
		ssa.BlockARM64Z, ssa.BlockARM64NZ,
		ssa.BlockARM64ZW, ssa.BlockARM64NZW,
		ssa.BlockARM64FLT, ssa.BlockARM64FGE,
		ssa.BlockARM64FLE, ssa.BlockARM64FGT,
		ssa.BlockARM64LTnoov, ssa.BlockARM64GEnoov:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	case ssa.BlockARM64TBZ, ssa.BlockARM64TBNZ:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		p.From.Offset = b.AuxInt
		p.From.Type = obj.TYPE_CONST
		p.Reg = b.Controls[0].Reg()

	case ssa.BlockARM64LEnoov:
		s.CombJump(b, next, &leJumps)
	case ssa.BlockARM64GTnoov:
		s.CombJump(b, next, &gtJumps)

	case ssa.BlockARM64JUMPTABLE:
		// MOVD	(TABLE)(IDX<<3), Rtmp
		// JMP	(Rtmp)
		p := s.Prog(arm64.AMOVD)
		p.From = genIndexedOperand(ssa.OpARM64MOVDloadidx8, b.Controls[1].Reg(), b.Controls[0].Reg())
		p.To.Type = obj.TYPE_REG
		p.To.Reg = arm64.REGTMP
		p = s.Prog(obj.AJMP)
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = arm64.REGTMP
		// Save jump tables for later resolution of the target blocks.
		s.JumpTables = append(s.JumpTables, b)

	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}

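// loadRegResult generates a load of the result value n+off from its stack
// slot into register reg.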
func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p := s.Prog(loadByType(t))
	p.From.Type = obj.TYPE_MEM
	p.From.Name = obj.NAME_AUTO
	p.From.Sym = n.Linksym()
	p.From.Offset = n.FrameOffset() + off
	p.To.Type = obj.TYPE_REG
	p.To.Reg = reg
	return p
}

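// spillArgReg generates a store that spills argument register reg to the
// parameter stack slot n+off.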
func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
	p = pp.Append(p, storeByType(t), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
	p.To.Name = obj.NAME_PARAM
	p.To.Sym = n.Linksym()
	p.Pos = p.Pos.WithNotStmt()
	return p
}