package ssagen

import (
	"bufio"
	"bytes"
	"fmt"
	"go/constant"
	"html"
	"internal/buildcfg"
	"os"
	"path/filepath"
	"sort"
	"strings"

	"cmd/compile/internal/abi"
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/liveness"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/staticdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/objabi"
	"cmd/internal/src"
	"cmd/internal/sys"

	rtabi "internal/abi"
)

var ssaConfig *ssa.Config
var ssaCaches []ssa.Cache

var ssaDump string     // early copy of $GOSSAFUNC; the func name to dump output for
var ssaDir string      // optional destination for ssa dump file
var ssaDumpStdout bool // whether to dump to stdout
var ssaDumpCFG string  // generate CFGs for these phases

const ssaDumpFile = "ssa.html"

// ssaDumpInlined holds all inlined functions when ssaDump contains a function name.
var ssaDumpInlined []*ir.Func

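// DumpInline records fn if its name matches $GOSSAFUNC, so that its body can
// later be included in the SSA dump's sources column after inlining.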
func DumpInline(fn *ir.Func) {
	if ssaDump != "" && ssaDump == ir.FuncName(fn) {
		ssaDumpInlined = append(ssaDumpInlined, fn)
	}
}

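// InitEnv reads GOSSAFUNC and GOSSADIR from the environment. A sketch of the
// accepted forms, derived from the parsing below (not an exhaustive
// reference):
//
//	GOSSAFUNC=Foo            dump SSA for function Foo to ssa.html
//	GOSSAFUNC=Foo+           also print the dump to stdout
//	GOSSAFUNC=Foo:<phases>   control which phases get CFGs in the HTML output
//
// GOSSADIR, if set, selects the directory the dump files are written to.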
func InitEnv() {
	ssaDump = os.Getenv("GOSSAFUNC")
	ssaDir = os.Getenv("GOSSADIR")
	if ssaDump != "" {
		if strings.HasSuffix(ssaDump, "+") {
			ssaDump = ssaDump[:len(ssaDump)-1]
			ssaDumpStdout = true
		}
		spl := strings.Split(ssaDump, ":")
		if len(spl) > 1 {
			ssaDump = spl[0]
			ssaDumpCFG = spl[1]
		}
	}
}

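// InitConfig builds the shared SSA configuration: it pre-creates pointer
// types needed by the backend, constructs ssaConfig and the per-worker
// ssaCaches, and resolves the runtime symbols that generated code refers to.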
func InitConfig() {
	types_ := ssa.NewTypes()

	if Arch.SoftFloat {
		softfloatInit()
	}

	// Generate a few pointer types that are uncommon in the frontend but common in the backend.
	// Caching is disabled in the backend, so generating these here avoids allocations.
	_ = types.NewPtr(types.Types[types.TINTER])                             // *interface{}
	_ = types.NewPtr(types.NewPtr(types.Types[types.TSTRING]))              // **string
	_ = types.NewPtr(types.NewSlice(types.Types[types.TINTER]))             // *[]interface{}
	_ = types.NewPtr(types.NewPtr(types.ByteType))                          // **byte
	_ = types.NewPtr(types.NewSlice(types.ByteType))                        // *[]byte
	_ = types.NewPtr(types.NewSlice(types.Types[types.TSTRING]))            // *[]string
	_ = types.NewPtr(types.NewPtr(types.NewPtr(types.Types[types.TUINT8]))) // ***uint8
	_ = types.NewPtr(types.Types[types.TINT16])                             // *int16
	_ = types.NewPtr(types.Types[types.TINT64])                             // *int64
	_ = types.NewPtr(types.ErrorType)                                       // *error
	_ = types.NewPtr(reflectdata.MapType())                                 // *map type
	_ = types.NewPtr(deferstruct())                                         // *runtime._defer
	types.NewPtrCacheEnabled = false
	ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
	ssaConfig.Race = base.Flag.Race
	ssaCaches = make([]ssa.Cache, base.Flag.LowerC)

	// Set up some runtime functions and variables.
	ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
	ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
	ir.Syms.AssertI2I = typecheck.LookupRuntimeFunc("assertI2I")
	ir.Syms.AssertI2I2 = typecheck.LookupRuntimeFunc("assertI2I2")
	ir.Syms.CgoCheckMemmove = typecheck.LookupRuntimeFunc("cgoCheckMemmove")
	ir.Syms.CgoCheckPtrWrite = typecheck.LookupRuntimeFunc("cgoCheckPtrWrite")
	ir.Syms.CheckPtrAlignment = typecheck.LookupRuntimeFunc("checkptrAlignment")
	ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
	ir.Syms.Deferprocat = typecheck.LookupRuntimeFunc("deferprocat")
	ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
	ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
	ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
	ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
	ir.Syms.GCWriteBarrier[0] = typecheck.LookupRuntimeFunc("gcWriteBarrier1")
	ir.Syms.GCWriteBarrier[1] = typecheck.LookupRuntimeFunc("gcWriteBarrier2")
	ir.Syms.GCWriteBarrier[2] = typecheck.LookupRuntimeFunc("gcWriteBarrier3")
	ir.Syms.GCWriteBarrier[3] = typecheck.LookupRuntimeFunc("gcWriteBarrier4")
	ir.Syms.GCWriteBarrier[4] = typecheck.LookupRuntimeFunc("gcWriteBarrier5")
	ir.Syms.GCWriteBarrier[5] = typecheck.LookupRuntimeFunc("gcWriteBarrier6")
	ir.Syms.GCWriteBarrier[6] = typecheck.LookupRuntimeFunc("gcWriteBarrier7")
	ir.Syms.GCWriteBarrier[7] = typecheck.LookupRuntimeFunc("gcWriteBarrier8")
	ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
	ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
	ir.Syms.InterfaceSwitch = typecheck.LookupRuntimeFunc("interfaceSwitch")
	ir.Syms.Memmove = typecheck.LookupRuntimeFunc("memmove")
	ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
	ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
	ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
	ir.Syms.Asanread = typecheck.LookupRuntimeFunc("asanread")
	ir.Syms.Asanwrite = typecheck.LookupRuntimeFunc("asanwrite")
	ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
	ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
	ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
	ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
	ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
	ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
	ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
	ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
	ir.Syms.Racefuncenter = typecheck.LookupRuntimeFunc("racefuncenter")
	ir.Syms.Racefuncexit = typecheck.LookupRuntimeFunc("racefuncexit")
	ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
	ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
	ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
	ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
	ir.Syms.TypeAssert = typecheck.LookupRuntimeFunc("typeAssert")
	ir.Syms.WBZero = typecheck.LookupRuntimeFunc("wbZero")
	ir.Syms.WBMove = typecheck.LookupRuntimeFunc("wbMove")
	ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT")
	ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41")
	ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA")
	ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4")
	ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS")
	ir.Syms.Staticuint64s = typecheck.LookupRuntimeVar("staticuint64s")
	ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
	ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv")
	ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier")
	ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")

	if Arch.LinkArch.Family == sys.Wasm {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("goPanicSliceConvert")
	} else {
		BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
		BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
		BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
		BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
		BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
		BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
		BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
		BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
		BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
		BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
		BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
		BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
		BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
		BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
		BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
		BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
		BoundsCheckFunc[ssa.BoundsConvert] = typecheck.LookupRuntimeFunc("panicSliceConvert")
	}
	if Arch.LinkArch.PtrSize == 4 {
		ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
		ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
		ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
		ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
		ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
		ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
		ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
		ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
		ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
		ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
		ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
		ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
		ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
		ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
		ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
		ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
	}

	// Wasm (all asm funcs with special ABIs).
	ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
	ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
	ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
	ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
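// AbiForBodylessFuncStackMap returns the ABI for a bodyless function's stack
// map. This is not necessarily the ABI used to call it; it conservatively
// returns a fresh copy of the ABI0 configuration.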
func AbiForBodylessFuncStackMap(fn *ir.Func) *abi.ABIConfig {
	return ssaConfig.ABI0.Copy()
}

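// abiForFunc implements ABI policy for a function: with the register-args
// experiment enabled it follows the function's declared ABI, and otherwise
// the registerparams pragma selects the register-based ABI. It does not
// return a copy.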
func abiForFunc(fn *ir.Func, abi0, abi1 *abi.ABIConfig) *abi.ABIConfig {
	if buildcfg.Experiment.RegabiArgs {
		// Select the ABI based on the function's defining ABI.
		if fn == nil {
			return abi1
		}
		switch fn.ABI {
		case obj.ABI0:
			return abi0
		case obj.ABIInternal:
			return abi1
		}
		base.Fatalf("function %v has unknown ABI %v", fn, fn.ABI)
		panic("not reachable")
	}

	// Without the experiment, a function can still opt in to the
	// register-based ABI via the registerparams pragma.
	a := abi0
	if fn != nil {
		if fn.Pragma&ir.RegisterParams != 0 {
			a = abi1
		}
	}
	return a
}

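// emitOpenDeferInfo emits FUNCDATA information about the defers in a function
// that is using open-coded defers, for use during panic processing. It
// records (as negated uvarints) the frame offsets of the deferBits slot and
// of the first defer closure slot; the remaining closure slots follow at
// pointer-size strides.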
func (s *state) emitOpenDeferInfo() {
	firstOffset := s.openDefers[0].closureNode.FrameOffset()

	// Verify that the defer closure slots were laid out consecutively.
	for i, r := range s.openDefers {
		have := r.closureNode.FrameOffset()
		want := firstOffset + int64(i)*int64(types.PtrSize)
		if have != want {
			base.FatalfAt(s.curfn.Pos(), "unexpected frame offset for open-coded defer slot #%v: have %v, want %v", i, have, want)
		}
	}

	x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
	x.Set(obj.AttrContentAddressable, true)
	s.curfn.LSym.Func().OpenCodedDeferInfo = x

	off := 0
	off = objw.Uvarint(x, off, uint64(-s.deferBitsTemp.FrameOffset()))
	off = objw.Uvarint(x, off, uint64(-firstOffset))
}

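// buildssa builds an SSA function for fn.
// worker indicates which of the backend workers is doing the processing.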
func buildssa(fn *ir.Func, worker int) *ssa.Func {
	name := ir.FuncName(fn)

	abiSelf := abiForFunc(fn, ssaConfig.ABI0, ssaConfig.ABI1)

	printssa := false
	// The GOSSAFUNC value may be a simple name ("(*Reader).Reset"), a
	// package-qualified name ("compress/gzip.(*Reader).Reset"), or carry an
	// ABI suffix in linker syntax ("(*Reader).Reset<0>").
	if strings.Contains(ssaDump, name) { // in all cases the function name is contained within the GOSSAFUNC value
		nameOptABI := name
		if strings.Contains(ssaDump, ",") { // ABI specification directly in GOSSAFUNC
			nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
		} else if strings.HasSuffix(ssaDump, ">") { // the linker syntax was used instead
			l := len(ssaDump)
			if l >= 3 && ssaDump[l-3] == '<' {
				nameOptABI = ssa.FuncNameABI(name, abiSelf.Which())
				ssaDump = ssaDump[:l-3] + "," + ssaDump[l-2:l-1]
			}
		}
		pkgDotName := base.Ctxt.Pkgpath + "." + nameOptABI
		printssa = nameOptABI == ssaDump || // "(*Reader).Reset"
			pkgDotName == ssaDump || // "compress/gzip.(*Reader).Reset"
			strings.HasSuffix(pkgDotName, ssaDump) && strings.HasSuffix(pkgDotName, "/"+ssaDump) // "gzip.(*Reader).Reset"
	}

	var astBuf *bytes.Buffer
	if printssa {
		astBuf = &bytes.Buffer{}
		ir.FDumpList(astBuf, "buildssa-body", fn.Body)
		if ssaDumpStdout {
			fmt.Println("generating SSA for", name)
			fmt.Print(astBuf.String())
		}
	}

	var s state
	s.pushLine(fn.Pos())
	defer s.popLine()

	s.hasdefer = fn.HasDefer()
	if fn.Pragma&ir.CgoUnsafeArgs != 0 {
		s.cgoUnsafeArgs = true
	}
	s.checkPtrEnabled = ir.ShouldCheckPtr(fn, 1)

	if base.Flag.Cfg.Instrumenting && fn.Pragma&ir.Norace == 0 && !fn.Linksym().ABIWrapper() {
		if !base.Flag.Race || !objabi.LookupPkgSpecial(fn.Sym().Pkg.Path).NoRaceFunc {
			s.instrumentMemory = true
		}
		if base.Flag.Race {
			s.instrumentEnterExit = true
		}
	}

	fe := ssafn{
		curfn: fn,
		log:   printssa && ssaDumpStdout,
	}
	s.curfn = fn

	cache := &ssaCaches[worker]
	cache.Reset()

	s.f = ssaConfig.NewFunc(&fe, cache)
	s.config = ssaConfig
	s.f.Type = fn.Type()
	s.f.Name = name
	s.f.PrintOrHtmlSSA = printssa
	if fn.Pragma&ir.Nosplit != 0 {
		s.f.NoSplit = true
	}
	s.f.ABI0 = ssaConfig.ABI0
	s.f.ABI1 = ssaConfig.ABI1
	s.f.ABIDefault = abiForFunc(nil, ssaConfig.ABI0, ssaConfig.ABI1)
	s.f.ABISelf = abiSelf

	s.panics = map[funcLine]*ssa.Block{}
	s.softFloat = s.config.SoftFloat

	// Allocate starting block.
	s.f.Entry = s.f.NewBlock(ssa.BlockPlain)
	s.f.Entry.Pos = fn.Pos()

	if printssa {
		ssaDF := ssaDumpFile
		if ssaDir != "" {
			ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+s.f.NameABI()+".html")
			ssaD := filepath.Dir(ssaDF)
			os.MkdirAll(ssaD, 0755)
		}
		s.f.HTMLWriter = ssa.NewHTMLWriter(ssaDF, s.f, ssaDumpCFG)

		dumpSourcesColumn(s.f.HTMLWriter, fn)
		s.f.HTMLWriter.WriteAST("AST", astBuf)
	}

	// Allocate starting values.
	s.labels = map[string]*ssaLabel{}
	s.fwdVars = map[ir.Node]*ssa.Value{}
	s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem)

	s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.OpenCodedDeferDisallowed()
	switch {
	case base.Debug.NoOpenDefer != 0:
		s.hasOpenDefers = false
	case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && base.Ctxt.Arch.Name == "386":
		// Don't support open-coded defers for 386 when building shared
		// libraries: extra code is inserted before the deferreturn/ret
		// sequence that we don't track correctly.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers && s.instrumentEnterExit {
		// Skip open-coded defers if we need to instrument function
		// entry/exit for the race detector, since that instrumentation
		// is not generated for the extra deferreturn/ret segment.
		s.hasOpenDefers = false
	}
	if s.hasOpenDefers {
		// Similarly, skip open-coded defers if there are any
		// heap-allocated result parameters that need to be copied back
		// to their stack slots.
		for _, f := range s.curfn.Type().Results() {
			if !f.Nname.(*ir.Name).OnStack() {
				s.hasOpenDefers = false
				break
			}
		}
	}
	if s.hasOpenDefers &&
		s.curfn.NumReturns*s.curfn.NumDefers > 15 {
		// Since we are generating defer calls at every exit for
		// open-coded defers, skip doing open-coded defers if there are
		// too many returns (especially if there are multiple defers).
		// Open-coded defers are most important for improving performance
		// for smaller functions (no matter how many defers).
		s.hasOpenDefers = false
	}

	s.sp = s.entryNewValue0(ssa.OpSP, types.Types[types.TUINTPTR])
	s.sb = s.entryNewValue0(ssa.OpSB, types.Types[types.TUINTPTR])

	s.startBlock(s.f.Entry)
	s.vars[memVar] = s.startmem
	if s.hasOpenDefers {
		// Create the deferBits variable and stack slot. deferBits is a
		// bitmask showing which of the open-coded defers in this function
		// have been activated.
		deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
		deferBitsTemp.SetAddrtaken(true)
		s.deferBitsTemp = deferBitsTemp
		// For this value, AuxInt is initialized to zero by default.
		startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
		s.vars[deferBitsVar] = startDeferBits
		s.deferBitsAddr = s.addr(deferBitsTemp)
		s.store(types.Types[types.TUINT8], s.deferBitsAddr, startDeferBits)
		// Make sure that the deferBits stack slot is kept alive (for use
		// by panics) and stores to deferBits are not eliminated, even if
		// all checking code on deferBits in the function exit can be
		// eliminated, because the defer statements were all
		// unconditional.
		s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, deferBitsTemp, s.mem(), false)
	}

	params := s.f.ABISelf.ABIAnalyze(fn.Type(), true)

	// The backend's stackframe pass prunes away entries from the fn's
	// Dcl list, including PPARAMOUT nodes that correspond to output
	// params passed in registers. Walk the Dcl list and capture these
	// nodes to a side list, so that we'll have them available during
	// DWARF-gen later on.
	var debugInfo ssa.FuncDebug
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAMOUT && n.IsOutputParamInRegisters() {
			debugInfo.RegOutputParams = append(debugInfo.RegOutputParams, n)
		}
	}
	fn.DebugInfo = &debugInfo

	// Generate addresses of local declarations.
	s.decladdrs = map[*ir.Name]*ssa.Value{}
	for _, n := range fn.Dcl {
		switch n.Class {
		case ir.PPARAM:
			// Blank and unnamed input parameters do not appear here, but do appear in the function type.
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PPARAMOUT:
			s.decladdrs[n] = s.entryNewValue2A(ssa.OpLocalAddr, types.NewPtr(n.Type()), n, s.sp, s.startmem)
		case ir.PAUTO:
			// processed at each use, to prevent Addr coming
			// before the decl.
		default:
			s.Fatalf("local variable with class %v unimplemented", n.Class)
		}
	}

	s.f.OwnAux = ssa.OwnAuxCall(fn.LSym, params)

	// Populate SSAable arguments.
	for _, n := range fn.Dcl {
		if n.Class == ir.PPARAM {
			if s.canSSA(n) {
				v := s.newValue0A(ssa.OpArg, n.Type(), n)
				s.vars[n] = v
				s.addNamedValue(n, v)
			} else { // address was taken AND/OR too large for SSA
				paramAssignment := ssa.ParamAssignmentForArgName(s.f, n)
				if len(paramAssignment.Registers) > 0 {
					if ssa.CanSSA(n.Type()) {
						// SSA-able type whose address was taken: receive the
						// value in an OpArg and store it to memory immediately.
						v := s.newValue0A(ssa.OpArg, n.Type(), n)
						s.store(n.Type(), s.decladdrs[n], v)
					} else {
						// Too big for SSA: store the incoming registers to the
						// stack slot piece by piece.
						s.storeParameterRegsToStack(s.f.ABISelf, paramAssignment, n, s.decladdrs[n], false)
					}
				}
			}
		}
	}

	// Populate closure variables.
	if fn.Needctxt() {
		clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
		offset := int64(types.PtrSize) // PtrSize to skip past function entry PC field
		for _, n := range fn.ClosureVars {
			typ := n.Type()
			if !n.Byval() {
				typ = types.NewPtr(typ)
			}

			offset = types.RoundUp(offset, typ.Alignment())
			ptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(typ), offset, clo)
			offset += typ.Size()

			// If n is a small variable captured by value, promote
			// it to PAUTO so it can be converted to SSA.
			//
			// Note: while a variable captured by value never has its
			// address taken by the user, generated runtime calls may
			// have taken it. Addrtaken variables are not converted to
			// SSA anyway, so there is no point in promoting them.
			if n.Byval() && !n.Addrtaken() && ssa.CanSSA(n.Type()) {
				n.Class = ir.PAUTO
				fn.Dcl = append(fn.Dcl, n)
				s.assign(n, s.load(n.Type(), ptr), false, 0)
				continue
			}

			if !n.Byval() {
				ptr = s.load(typ, ptr)
			}
			s.setHeapaddr(fn.Pos(), n, ptr)
		}
	}

	// Convert the AST-based IR to the SSA-based IR.
	if s.instrumentEnterExit {
		s.rtcall(ir.Syms.Racefuncenter, true, nil, s.newValue0(ssa.OpGetCallerPC, types.Types[types.TUINTPTR]))
	}
	s.zeroResults()
	s.paramsToHeap()
	s.stmtList(fn.Body)

	// fallthrough to exit
	if s.curBlock != nil {
		s.pushLine(fn.Endlineno)
		s.exit()
		s.popLine()
	}

	for _, b := range s.f.Blocks {
		if b.Pos != src.NoXPos {
			s.updateUnsetPredPos(b)
		}
	}

	s.f.HTMLWriter.WritePhase("before insert phis", "before insert phis")

	s.insertPhis()

	// Main call to ssa package to compile function.
	ssa.Compile(s.f)

	fe.AllocFrame(s.f)

	if len(s.openDefers) != 0 {
		s.emitOpenDeferInfo()
	}

	// Record incoming parameter spill information for morestack calls
	// emitted in the assembler. This is done here using all the parameters
	// (used, partially used, and unused) because it mimics the behavior of
	// the former ABI (everything stored) and because it's not 100% clear
	// whether naming conventions are respected in autogenerated code.
	for _, p := range params.InParams() {
		typs, offs := p.RegisterTypesAndOffsets()
		for i, t := range typs {
			o := offs[i]
			fo := p.FrameOffset(params)
			reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
			s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
		}
	}

	return s.f
}

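// storeParameterRegsToStack stores each register piece of a multi-register
// parameter assignment to its corresponding stack slot. If pointersOnly is
// set, only the pointer-shaped pieces are stored.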
func (s *state) storeParameterRegsToStack(abi *abi.ABIConfig, paramAssignment *abi.ABIParamAssignment, n *ir.Name, addr *ssa.Value, pointersOnly bool) {
	typs, offs := paramAssignment.RegisterTypesAndOffsets()
	for i, t := range typs {
		if pointersOnly && !t.IsPtrShaped() {
			continue
		}
		r := paramAssignment.Registers[i]
		o := offs[i]
		op, reg := ssa.ArgOpAndRegisterFor(r, abi)
		aux := &ssa.AuxNameOffset{Name: n, Offset: o}
		v := s.newValue0I(op, t, reg)
		v.Aux = aux
		p := s.newValue1I(ssa.OpOffPtr, types.NewPtr(t), o, addr)
		s.store(t, p, v)
	}
}

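// zeroResults zeros the return values at the start of the function.
// We need to do this very early in the function. Defers might stop
// panics and show the return values as they exist at the time of panic.
// For precise stacks, the garbage collector assumes results are always
// live, so we need to zero them before any defers appear.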
func (s *state) zeroResults() {
	for _, f := range s.curfn.Type().Results() {
		n := f.Nname.(*ir.Name)
		if !n.OnStack() {
			// Heap-allocated result: its backing store is allocated
			// zeroed by paramsToHeap (via newobject).
			continue
		}
		// Zero the stack location containing f.
		if typ := n.Type(); ssa.CanSSA(typ) {
			s.assign(n, s.zeroVal(typ), false, 0)
		} else {
			if typ.HasPointers() {
				s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
			}
			s.zero(n.Type(), s.decladdrs[n])
		}
	}
}

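// paramsToHeap produces code to allocate memory for heap-escaped parameters
// and to copy non-result parameters' values from the stack.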
func (s *state) paramsToHeap() {
	do := func(params []*types.Field) {
		for _, f := range params {
			if f.Nname == nil {
				continue // anonymous or blank parameter
			}
			n := f.Nname.(*ir.Name)
			if ir.IsBlank(n) || n.OnStack() {
				continue
			}
			s.newHeapaddr(n)
			if n.Class == ir.PPARAM {
				s.move(n.Type(), s.expr(n.Heapaddr), s.decladdrs[n])
			}
		}
	}

	typ := s.curfn.Type()
	do(typ.Recvs())
	do(typ.Params())
	do(typ.Results())
}

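// newHeapaddr allocates heap memory for n and sets its heap address.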
func (s *state) newHeapaddr(n *ir.Name) {
	s.setHeapaddr(n.Pos(), n, s.newObject(n.Type(), nil))
}

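// setHeapaddr allocates a new PAUTO variable to store ptr (which must be
// non-nil) and then sets it as the heap address used for storing n.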
func (s *state) setHeapaddr(pos src.XPos, n *ir.Name, ptr *ssa.Value) {
	if !ptr.Type.IsPtr() || !types.Identical(n.Type(), ptr.Type.Elem()) {
		base.FatalfAt(n.Pos(), "setHeapaddr %L with type %v", n, ptr.Type)
	}

	// Declare variable to hold address.
	sym := &types.Sym{Name: "&" + n.Sym().Name, Pkg: types.LocalPkg}
	addr := s.curfn.NewLocal(pos, sym, types.NewPtr(n.Type()))
	addr.SetUsed(true)
	types.CalcSize(addr.Type())

	if n.Class == ir.PPARAMOUT {
		addr.SetIsOutputParamHeapAddr(true)
	}

	// Record the new heap address and initialize it.
	n.Heapaddr = addr
	s.assign(addr, ptr, false, 0)
}

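// newObject returns an SSA value denoting new(typ). Zero-sized types yield a
// pointer to the shared zerobase symbol instead of a runtime allocation.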
func (s *state) newObject(typ *types.Type, rtype *ssa.Value) *ssa.Value {
	if typ.Size() == 0 {
		return s.newValue1A(ssa.OpAddr, types.NewPtr(typ), ir.Syms.Zerobase, s.sb)
	}
	if rtype == nil {
		rtype = s.reflectType(typ)
	}
	return s.rtcall(ir.Syms.Newobject, true, []*types.Type{types.NewPtr(typ)}, rtype)[0]
}

func (s *state) checkPtrAlignment(n *ir.ConvExpr, v *ssa.Value, count *ssa.Value) {
	if !n.Type().IsPtr() {
		s.Fatalf("expected pointer type: %v", n.Type())
	}
	elem, rtypeExpr := n.Type().Elem(), n.ElemRType
	if count != nil {
		if !elem.IsArray() {
			s.Fatalf("expected array type: %v", elem)
		}
		elem, rtypeExpr = elem.Elem(), n.ElemElemRType
	}
	size := elem.Size()
	// An element with alignment 1 needs no alignment check, and a zero- or
	// one-byte element with no count needs no size check either.
	if elem.Alignment() == 1 && (size == 0 || size == 1 || count == nil) {
		return
	}
	if count == nil {
		count = s.constInt(types.Types[types.TUINTPTR], 1)
	}
	if count.Type.Size() != s.config.PtrSize {
		s.Fatalf("expected count fit to a uintptr size, have: %d, want: %d", count.Type.Size(), s.config.PtrSize)
	}
	var rtype *ssa.Value
	if rtypeExpr != nil {
		rtype = s.expr(rtypeExpr)
	} else {
		rtype = s.reflectType(elem)
	}
	s.rtcall(ir.Syms.CheckPtrAlignment, true, nil, v, rtype, count)
}

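// reflectType returns an SSA value representing a pointer to typ's
// reflection type descriptor.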
func (s *state) reflectType(typ *types.Type) *ssa.Value {
	lsym := reflectdata.TypeLinksym(typ)
	return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(types.Types[types.TUINT8]), lsym, s.sb)
}

func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *ir.Func) {
	// Read sources of target function fn.
	fname := base.Ctxt.PosTable.Pos(fn.Pos()).Filename()
	targetFn, err := readFuncLines(fname, fn.Pos().Line(), fn.Endlineno.Line())
	if err != nil {
		writer.Logf("cannot read sources for function %v: %v", fn, err)
	}

	// Read sources of inlined functions.
	var inlFns []*ssa.FuncLines
	for _, fi := range ssaDumpInlined {
		elno := fi.Endlineno
		fname := base.Ctxt.PosTable.Pos(fi.Pos()).Filename()
		fnLines, err := readFuncLines(fname, fi.Pos().Line(), elno.Line())
		if err != nil {
			writer.Logf("cannot read sources for inlined function %v: %v", fi, err)
			continue
		}
		inlFns = append(inlFns, fnLines)
	}

	sort.Sort(ssa.ByTopo(inlFns))
	if targetFn != nil {
		inlFns = append([]*ssa.FuncLines{targetFn}, inlFns...)
	}

	writer.WriteSources("sources", inlFns)
}

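// readFuncLines returns the lines of file from start to end, inclusive, for
// display in the HTML dump's sources column.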
func readFuncLines(file string, start, end uint) (*ssa.FuncLines, error) {
	f, err := os.Open(os.ExpandEnv(file))
	if err != nil {
		return nil, err
	}
	defer f.Close()
	var lines []string
	ln := uint(1)
	scanner := bufio.NewScanner(f)
	for scanner.Scan() && ln <= end {
		if ln >= start {
			lines = append(lines, scanner.Text())
		}
		ln++
	}
	return &ssa.FuncLines{Filename: file, StartLineno: start, Lines: lines}, nil
}

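// updateUnsetPredPos propagates position information for b towards all of b's
// predecessors that lack a position, and recurs on any predecessor that was
// updated. b must have a non-empty position.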
func (s *state) updateUnsetPredPos(b *ssa.Block) {
	if b.Pos == src.NoXPos {
		s.Fatalf("Block %s should have a position", b)
	}
	bestPos := src.NoXPos
	for _, e := range b.Preds {
		p := e.Block()
		if !p.LackingPos() {
			continue
		}
		if bestPos == src.NoXPos {
			bestPos = b.Pos
			for _, v := range b.Values {
				if v.LackingPos() {
					continue
				}
				if v.Pos != src.NoXPos {
					// Assume values are still in roughly textual order;
					// use the first one with a position.
					bestPos = v.Pos
					break
				}
			}
		}
		p.Pos = bestPos
		s.updateUnsetPredPos(p) // We do not expect long chains of these, so recursion is okay.
	}
}

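// openDeferInfo holds information about a single open-coded defer.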
type openDeferInfo struct {
	// The node representing the call of the defer.
	n *ir.CallExpr
	// If the defer call is a closure call, the address of the argtmp where
	// the closure is stored.
	closure *ssa.Value
	// The node representing the argtmp where the closure is stored - used
	// for function, method, or interface call, to store a closure that
	// panic processing can use for this defer.
	closureNode *ir.Name
}

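// state contains the state needed during the conversion of a single
// function's AST to SSA.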
type state struct {
	// configuration (arch) information
	config *ssa.Config

	// function we're building
	f *ssa.Func

	// Node for function
	curfn *ir.Func

	// labels in f
	labels map[string]*ssaLabel

	// unlabeled break and continue statement tracking
	breakTo    *ssa.Block // current target for plain break statement
	continueTo *ssa.Block // current target for plain continue statement

	// current location where we're interpreting the AST
	curBlock *ssa.Block

	// variable assignments in the current block (map from variable symbol to ssa value)
	// *Node is the unique identifier (an ONAME Node) for the variable.
	vars map[ir.Node]*ssa.Value

	// fwdVars are variables that are used before they are defined in the current block.
	// This map exists just to coalesce multiple references into a single FwdRef op.
	// *Node is the unique identifier (an ONAME Node) for the variable.
	fwdVars map[ir.Node]*ssa.Value

	// all defined variables at the end of each block. Indexed by block ID.
	defvars []map[ir.Node]*ssa.Value

	// addresses of PPARAM and PPARAMOUT variables on the stack.
	decladdrs map[*ir.Name]*ssa.Value

	// starting values. Memory, stack pointer, and globals pointer
	startmem *ssa.Value
	sp       *ssa.Value
	sb       *ssa.Value
	// value representing the address where the deferBits autotmp is stored
	deferBitsAddr *ssa.Value
	deferBitsTemp *ir.Name

	// line number stack. The current line number is top of stack
	line []src.XPos
	// the last line number processed; it may have been popped
	lastPos src.XPos

	// list of panic calls by function name and line number.
	// Used to deduplicate panic calls.
	panics map[funcLine]*ssa.Block

	cgoUnsafeArgs       bool
	hasdefer            bool // whether the function contains a defer statement
	softFloat           bool
	hasOpenDefers       bool // whether we are doing open-coded defers
	checkPtrEnabled     bool // whether to insert checkptr instrumentation
	instrumentEnterExit bool // whether to instrument function enter/exit
	instrumentMemory    bool // whether to instrument memory operations

	// If doing open-coded defers, list of info about the defer calls in
	// scanning order. Hence, at exit we should run these defers in reverse
	// order of this list.
	openDefers []*openDeferInfo

	// For open-coded defers, the entry and final blocks of the last defer
	// exit code we generated so far, used to share exit code between
	// multiple function exits.
	lastDeferExit       *ssa.Block
	lastDeferFinalBlock *ssa.Block
	lastDeferCount      int // number of defers encountered at that point

	prevCall *ssa.Value // the last call we emitted
}

type funcLine struct {
	f    *obj.LSym
	base *src.PosBase
	line uint
}

type ssaLabel struct {
	target         *ssa.Block // block identified by this label
	breakTarget    *ssa.Block // block to break to in control flow node identified by this label
	continueTarget *ssa.Block // block to continue to in control flow node identified by this label
}

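// label returns the label associated with sym, creating it if necessary.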
func (s *state) label(sym *types.Sym) *ssaLabel {
	lab := s.labels[sym.Name]
	if lab == nil {
		lab = new(ssaLabel)
		s.labels[sym.Name] = lab
	}
	return lab
}

func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
func (s *state) Log() bool                            { return s.f.Log() }
func (s *state) Fatalf(msg string, args ...interface{}) {
	s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
func (s *state) Debug_checknil() bool                                { return s.f.Frontend().Debug_checknil() }

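// ssaMarker returns a fresh *ir.Name with the given name, used as a marker
// key in s.vars for values that do not correspond to a user variable.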
func ssaMarker(name string) *ir.Name {
	return ir.NewNameAt(base.Pos, &types.Sym{Name: name}, nil)
}

var (
	// marker node for the memory variable
	memVar = ssaMarker("mem")

	// marker nodes for temporary variables
	ptrVar       = ssaMarker("ptr")
	lenVar       = ssaMarker("len")
	capVar       = ssaMarker("cap")
	typVar       = ssaMarker("typ")
	okVar        = ssaMarker("ok")
	deferBitsVar = ssaMarker("deferBits")
	hashVar      = ssaMarker("hash")
)

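// startBlock sets the current block we're generating code in to b.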
func (s *state) startBlock(b *ssa.Block) {
	if s.curBlock != nil {
		s.Fatalf("starting block %v when block %v has not ended", b, s.curBlock)
	}
	s.curBlock = b
	s.vars = map[ir.Node]*ssa.Value{}
	for n := range s.fwdVars {
		delete(s.fwdVars, n)
	}
}

// endBlock marks the end of generating code for the current block.
// Returns the (former) current block. Returns nil if there is no current
// block, i.e. if no code flows to the current execution point.
func (s *state) endBlock() *ssa.Block {
	b := s.curBlock
	if b == nil {
		return nil
	}
	for len(s.defvars) <= int(b.ID) {
		s.defvars = append(s.defvars, nil)
	}
	s.defvars[b.ID] = s.vars
	s.curBlock = nil
	s.vars = nil
	if b.LackingPos() {
		// Empty plain blocks get the line of their successor (handled after
		// all blocks are created), except for increment blocks in For
		// statements and blocks ending in GOTO/BREAK/CONTINUE.
		b.Pos = src.NoXPos
	} else {
		b.Pos = s.lastPos
	}
	return b
}

// pushLine pushes a line number on the line number stack.
func (s *state) pushLine(line src.XPos) {
	if !line.IsKnown() {
		// the frontend may emit a node with a missing line number,
		// use the parent line number in this case.
		line = s.peekPos()
		if base.Flag.K != 0 {
			base.Warn("buildssa: unknown position (line 0)")
		}
	} else {
		s.lastPos = line
	}

	s.line = append(s.line, line)
}

// popLine pops the top of the line number stack.
func (s *state) popLine() {
	s.line = s.line[:len(s.line)-1]
}

// peekPos peeks the top of the line number stack.
func (s *state) peekPos() src.XPos {
	return s.line[len(s.line)-1]
}

// newValue0 adds a new value with no arguments to the current block.
func (s *state) newValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.curBlock.NewValue0(s.peekPos(), op, t)
}

// newValue0A adds a new value with no arguments and an aux value to the current block.
func (s *state) newValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.curBlock.NewValue0A(s.peekPos(), op, t, aux)
}

// newValue0I adds a new value with no arguments and an auxint value to the current block.
func (s *state) newValue0I(op ssa.Op, t *types.Type, auxint int64) *ssa.Value {
	return s.curBlock.NewValue0I(s.peekPos(), op, t, auxint)
}

// newValue1 adds a new value with one argument to the current block.
func (s *state) newValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1(s.peekPos(), op, t, arg)
}

// newValue1A adds a new value with one argument and an aux value to the current block.
func (s *state) newValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
}

// newValue1Apos adds a new value with one argument and an aux value to the current block.
// isStmt determines whether the created values may be a statement or not
// (i.e., false means never, yes means maybe).
func (s *state) newValue1Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue1A(s.peekPos(), op, t, aux, arg)
	}
	return s.curBlock.NewValue1A(s.peekPos().WithNotStmt(), op, t, aux, arg)
}

// newValue1I adds a new value with one argument and an auxint value to the current block.
func (s *state) newValue1I(op ssa.Op, t *types.Type, aux int64, arg *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue1I(s.peekPos(), op, t, aux, arg)
}

// newValue2 adds a new value with two arguments to the current block.
func (s *state) newValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2(s.peekPos(), op, t, arg0, arg1)
}

// newValue2A adds a new value with two arguments and an aux value to the current block.
func (s *state) newValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue2Apos adds a new value with two arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement or not
// (i.e., false means never, yes means maybe).
func (s *state) newValue2Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue2A(s.peekPos(), op, t, aux, arg0, arg1)
	}
	return s.curBlock.NewValue2A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1)
}

// newValue2I adds a new value with two arguments and an auxint value to the current block.
func (s *state) newValue2I(op ssa.Op, t *types.Type, aux int64, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue2I(s.peekPos(), op, t, aux, arg0, arg1)
}

// newValue3 adds a new value with three arguments to the current block.
func (s *state) newValue3(op ssa.Op, t *types.Type, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3(s.peekPos(), op, t, arg0, arg1, arg2)
}

// newValue3I adds a new value with three arguments and an auxint value to the current block.
func (s *state) newValue3I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3I(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3A adds a new value with three arguments and an aux value to the current block.
func (s *state) newValue3A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
}

// newValue3Apos adds a new value with three arguments and an aux value to the current block.
// isStmt determines whether the created values may be a statement or not
// (i.e., false means never, yes means maybe).
func (s *state) newValue3Apos(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1, arg2 *ssa.Value, isStmt bool) *ssa.Value {
	if isStmt {
		return s.curBlock.NewValue3A(s.peekPos(), op, t, aux, arg0, arg1, arg2)
	}
	return s.curBlock.NewValue3A(s.peekPos().WithNotStmt(), op, t, aux, arg0, arg1, arg2)
}

// newValue4 adds a new value with four arguments to the current block.
func (s *state) newValue4(op ssa.Op, t *types.Type, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4(s.peekPos(), op, t, arg0, arg1, arg2, arg3)
}

// newValue4I adds a new value with four arguments and an auxint value to the current block.
func (s *state) newValue4I(op ssa.Op, t *types.Type, aux int64, arg0, arg1, arg2, arg3 *ssa.Value) *ssa.Value {
	return s.curBlock.NewValue4I(s.peekPos(), op, t, aux, arg0, arg1, arg2, arg3)
}

// entryBlock returns the block to use for entry-time values.
func (s *state) entryBlock() *ssa.Block {
	b := s.f.Entry
	if base.Flag.N > 0 && s.curBlock != nil {
		// If optimization is disabled, generate the value in the current
		// block instead of the entry block: without dead code elimination,
		// values hoisted to the entry block would stay live for the whole
		// function.
		b = s.curBlock
	}
	return b
}

// entryNewValue0 adds a new value with no arguments to the entry block.
func (s *state) entryNewValue0(op ssa.Op, t *types.Type) *ssa.Value {
	return s.entryBlock().NewValue0(src.NoXPos, op, t)
}

// entryNewValue0A adds a new value with no arguments and an aux value to the entry block.
func (s *state) entryNewValue0A(op ssa.Op, t *types.Type, aux ssa.Aux) *ssa.Value {
	return s.entryBlock().NewValue0A(src.NoXPos, op, t, aux)
}

// entryNewValue1 adds a new value with one argument to the entry block.
func (s *state) entryNewValue1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1(src.NoXPos, op, t, arg)
}

// entryNewValue1I adds a new value with one argument and an auxint value to the entry block.
func (s *state) entryNewValue1I(op ssa.Op, t *types.Type, auxint int64, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1I(src.NoXPos, op, t, auxint, arg)
}

// entryNewValue1A adds a new value with one argument and an aux value to the entry block.
func (s *state) entryNewValue1A(op ssa.Op, t *types.Type, aux ssa.Aux, arg *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue1A(src.NoXPos, op, t, aux, arg)
}

// entryNewValue2 adds a new value with two arguments to the entry block.
func (s *state) entryNewValue2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2(src.NoXPos, op, t, arg0, arg1)
}

// entryNewValue2A adds a new value with two arguments and an aux value to the entry block.
func (s *state) entryNewValue2A(op ssa.Op, t *types.Type, aux ssa.Aux, arg0, arg1 *ssa.Value) *ssa.Value {
	return s.entryBlock().NewValue2A(src.NoXPos, op, t, aux, arg0, arg1)
}

// const* routines add a new const value to the function.
func (s *state) constSlice(t *types.Type) *ssa.Value {
	return s.f.ConstSlice(t)
}
func (s *state) constInterface(t *types.Type) *ssa.Value {
	return s.f.ConstInterface(t)
}
func (s *state) constNil(t *types.Type) *ssa.Value { return s.f.ConstNil(t) }
func (s *state) constEmptyString(t *types.Type) *ssa.Value {
	return s.f.ConstEmptyString(t)
}
func (s *state) constBool(c bool) *ssa.Value {
	return s.f.ConstBool(types.Types[types.TBOOL], c)
}
func (s *state) constInt8(t *types.Type, c int8) *ssa.Value {
	return s.f.ConstInt8(t, c)
}
func (s *state) constInt16(t *types.Type, c int16) *ssa.Value {
	return s.f.ConstInt16(t, c)
}
func (s *state) constInt32(t *types.Type, c int32) *ssa.Value {
	return s.f.ConstInt32(t, c)
}
func (s *state) constInt64(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstInt64(t, c)
}
func (s *state) constFloat32(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat32(t, c)
}
func (s *state) constFloat64(t *types.Type, c float64) *ssa.Value {
	return s.f.ConstFloat64(t, c)
}
func (s *state) constInt(t *types.Type, c int64) *ssa.Value {
	if s.config.PtrSize == 8 {
		return s.constInt64(t, c)
	}
	if int64(int32(c)) != c {
		s.Fatalf("integer constant too big %d", c)
	}
	return s.constInt32(t, int32(c))
}
func (s *state) constOffPtrSP(t *types.Type, c int64) *ssa.Value {
	return s.f.ConstOffPtrSP(t, c, s.sp)
}

// newValueOrSfCall* are wrappers around newValue*, which may create a call to a
// soft-float runtime function instead (when emitting soft-float code).
func (s *state) newValueOrSfCall1(op ssa.Op, t *types.Type, arg *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg); ok {
			return c
		}
	}
	return s.newValue1(op, t, arg)
}
func (s *state) newValueOrSfCall2(op ssa.Op, t *types.Type, arg0, arg1 *ssa.Value) *ssa.Value {
	if s.softFloat {
		if c, ok := s.sfcall(op, arg0, arg1); ok {
			return c
		}
	}
	return s.newValue2(op, t, arg0, arg1)
}

type instrumentKind uint8

const (
	instrumentRead instrumentKind = iota
	instrumentWrite
	instrumentMove
)

func (s *state) instrument(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	s.instrument2(t, addr, nil, kind)
}

// instrumentFields instruments a read/write operation on addr.
// If it is instrumenting for MSAN or ASAN and t is a struct type, it
// instruments the operation for each field, instead of for the whole struct.
func (s *state) instrumentFields(t *types.Type, addr *ssa.Value, kind instrumentKind) {
	if !(base.Flag.MSan || base.Flag.ASan) || !t.IsStruct() {
		s.instrument(t, addr, kind)
		return
	}
	for _, f := range t.Fields() {
		if f.Sym.IsBlank() {
			continue
		}
		offptr := s.newValue1I(ssa.OpOffPtr, types.NewPtr(f.Type), f.Offset, addr)
		s.instrumentFields(f.Type, offptr, kind)
	}
}

func (s *state) instrumentMove(t *types.Type, dst, src *ssa.Value) {
	if base.Flag.MSan {
		s.instrument2(t, dst, src, instrumentMove)
	} else {
		s.instrument(t, src, instrumentRead)
		s.instrument(t, dst, instrumentWrite)
	}
}

func (s *state) instrument2(t *types.Type, addr, addr2 *ssa.Value, kind instrumentKind) {
	if !s.instrumentMemory {
		return
	}

	w := t.Size()
	if w == 0 {
		return // can't race on zero-sized things
	}

	if ssa.IsSanitizerSafeAddr(addr) {
		return
	}

	var fn *obj.LSym
	needWidth := false

	if addr2 != nil && kind != instrumentMove {
		panic("instrument2: non-nil addr2 for non-move instrumentation")
	}

	if base.Flag.MSan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Msanread
		case instrumentWrite:
			fn = ir.Syms.Msanwrite
		case instrumentMove:
			fn = ir.Syms.Msanmove
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 {
		// for composite objects we have to write every address
		// because a write might happen to any subobject.
		// composites with only one element don't have subobjects, though.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Racereadrange
		case instrumentWrite:
			fn = ir.Syms.Racewriterange
		default:
			panic("unreachable")
		}
		needWidth = true
	} else if base.Flag.Race {
		// for non-composite objects we can write just the start
		// address, as any write must write the first byte.
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Raceread
		case instrumentWrite:
			fn = ir.Syms.Racewrite
		default:
			panic("unreachable")
		}
	} else if base.Flag.ASan {
		switch kind {
		case instrumentRead:
			fn = ir.Syms.Asanread
		case instrumentWrite:
			fn = ir.Syms.Asanwrite
		default:
			panic("unreachable")
		}
		needWidth = true
	} else {
		panic("unreachable")
	}

	args := []*ssa.Value{addr}
	if addr2 != nil {
		args = append(args, addr2)
	}
	if needWidth {
		args = append(args, s.constInt(types.Types[types.TUINTPTR], w))
	}
	s.rtcall(fn, true, nil, args...)
}

func (s *state) load(t *types.Type, src *ssa.Value) *ssa.Value {
	s.instrumentFields(t, src, instrumentRead)
	return s.rawLoad(t, src)
}

func (s *state) rawLoad(t *types.Type, src *ssa.Value) *ssa.Value {
	return s.newValue2(ssa.OpLoad, t, src, s.mem())
}

func (s *state) store(t *types.Type, dst, val *ssa.Value) {
	s.vars[memVar] = s.newValue3A(ssa.OpStore, types.TypeMem, t, dst, val, s.mem())
}

func (s *state) zero(t *types.Type, dst *ssa.Value) {
	s.instrument(t, dst, instrumentWrite)
	store := s.newValue2I(ssa.OpZero, types.TypeMem, t.Size(), dst, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}

func (s *state) move(t *types.Type, dst, src *ssa.Value) {
	s.moveWhichMayOverlap(t, dst, src, false)
}
func (s *state) moveWhichMayOverlap(t *types.Type, dst, src *ssa.Value, mayOverlap bool) {
	s.instrumentMove(t, dst, src)
	if mayOverlap && t.IsArray() && t.NumElem() > 1 && !ssa.IsInlinableMemmove(dst, src, t.Size(), s.f.Config) {
		// Normally, when moving Go values of type T from one location to
		// another, we don't need to worry about partial overlaps: the two Ts
		// must either be in disjoint (nonoverlapping) memory or in exactly
		// the same location. Partial overlaps can be constructed with unsafe
		// or, since Go 1.17, with slice-to-array-pointer conversions. Such
		// array-into-array moves must be done with a memmove so that partial
		// overlaps are handled correctly.
		if t.HasPointers() {
			s.rtcall(ir.Syms.Typedmemmove, true, nil, s.reflectType(t), dst, src)
			// typedmemmove performs write barriers, so record a write-barrier
			// position for this function.
			s.curfn.SetWBPos(s.peekPos())
		} else {
			s.rtcall(ir.Syms.Memmove, true, nil, dst, src, s.constInt(types.Types[types.TUINTPTR], t.Size()))
		}
		ssa.LogLargeCopy(s.f.Name, s.peekPos(), t.Size())
		return
	}
	store := s.newValue3I(ssa.OpMove, types.TypeMem, t.Size(), dst, src, s.mem())
	store.Aux = t
	s.vars[memVar] = store
}

// stmtList converts the statement list l to SSA and adds it to s.
func (s *state) stmtList(l ir.Nodes) {
	for _, n := range l {
		s.stmt(n)
	}
}

// stmt converts the statement n to SSA and adds it to s.
func (s *state) stmt(n ir.Node) {
	s.pushLine(n.Pos())
	defer s.popLine()

	// If s.curBlock is nil, and n isn't a label (which might refer to an
	// unreachable block), then we're in unreachable code and don't need
	// to generate SSA for it.
	if s.curBlock == nil && n.Op() != ir.OLABEL {
		return
	}

	s.stmtList(n.Init())
	switch n.Op() {

	case ir.OBLOCK:
		n := n.(*ir.BlockStmt)
		s.stmtList(n.List)

	case ir.OFALL: // no-op

	case ir.OCALLFUNC:
		n := n.(*ir.CallExpr)
		if ir.IsIntrinsicCall(n) {
			s.intrinsicCall(n)
			return
		}
		fallthrough

	case ir.OCALLINTER:
		n := n.(*ir.CallExpr)
		s.callResult(n, callNormal)
		if n.Op() == ir.OCALLFUNC && n.Fun.Op() == ir.ONAME && n.Fun.(*ir.Name).Class == ir.PFUNC {
			if fn := n.Fun.Sym().Name; base.Flag.CompilingRuntime && fn == "throw" ||
				n.Fun.Sym().Pkg == ir.Pkgs.Runtime &&
					(fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" ||
						fn == "panicmakeslicelen" || fn == "panicmakeslicecap" || fn == "panicunsafeslicelen" ||
						fn == "panicunsafeslicenilptr" || fn == "panicunsafestringlen" || fn == "panicunsafestringnilptr") {
				// Calls to these runtime functions never return;
				// end the block with an exit.
				m := s.mem()
				b := s.endBlock()
				b.Kind = ssa.BlockExit
				b.SetControl(m)
			}
		}
	case ir.ODEFER:
		n := n.(*ir.GoDeferStmt)
		if base.Debug.Defer > 0 {
			var defertype string
			if s.hasOpenDefers {
				defertype = "open-coded"
			} else if n.Esc() == ir.EscNever {
				defertype = "stack-allocated"
			} else {
				defertype = "heap-allocated"
			}
			base.WarnfAt(n.Pos(), "%s defer", defertype)
		}
		if s.hasOpenDefers {
			s.openDeferRecord(n.Call.(*ir.CallExpr))
		} else {
			d := callDefer
			if n.Esc() == ir.EscNever && n.DeferAt == nil {
				d = callDeferStack
			}
			s.call(n.Call.(*ir.CallExpr), d, false, n.DeferAt)
		}
	case ir.OGO:
		n := n.(*ir.GoDeferStmt)
		s.callResult(n.Call.(*ir.CallExpr), callGo)

	case ir.OAS2DOTTYPE:
		n := n.(*ir.AssignListStmt)
		var res, resok *ssa.Value
		if n.Rhs[0].Op() == ir.ODOTTYPE2 {
			res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
		} else {
			res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
		}
		deref := false
		if !ssa.CanSSA(n.Rhs[0].Type()) {
			if res.Op != ssa.OpLoad {
				s.Fatalf("dottype of non-load")
			}
			mem := s.mem()
			if res.Args[1] != mem {
				s.Fatalf("memory no longer live from 2-result dottype load")
			}
			deref = true
			res = res.Args[0]
		}
		s.assign(n.Lhs[0], res, deref, 0)
		s.assign(n.Lhs[1], resok, false, 0)
		return

	case ir.OAS2FUNC:
		// We come here only when it is an intrinsic call returning two values.
		n := n.(*ir.AssignListStmt)
		call := n.Rhs[0].(*ir.CallExpr)
		if !ir.IsIntrinsicCall(call) {
			s.Fatalf("non-intrinsic AS2FUNC not expanded %v", call)
		}
		v := s.intrinsicCall(call)
		v1 := s.newValue1(ssa.OpSelect0, n.Lhs[0].Type(), v)
		v2 := s.newValue1(ssa.OpSelect1, n.Lhs[1].Type(), v)
		s.assign(n.Lhs[0], v1, false, 0)
		s.assign(n.Lhs[1], v2, false, 0)
		return

	case ir.ODCL:
		n := n.(*ir.Decl)
		if v := n.X; v.Esc() == ir.EscHeap {
			s.newHeapaddr(v)
		}

	case ir.OLABEL:
		n := n.(*ir.LabelStmt)
		sym := n.Label
		if sym.IsBlank() {
			// Nothing to do because the label is unusable and unused.
			break
		}
		lab := s.label(sym)

		// The label might already have a target block via a goto.
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		// Go to that label.
		// (We pretend "label:" is preceded by "goto label", unless the
		// label is reached from an actual fallthrough from above.)
		if s.curBlock != nil {
			b := s.endBlock()
			b.AddEdgeTo(lab.target)
		}
		s.startBlock(lab.target)

	case ir.OGOTO:
		n := n.(*ir.BranchStmt)
		sym := n.Label

		lab := s.label(sym)
		if lab.target == nil {
			lab.target = s.f.NewBlock(ssa.BlockPlain)
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(lab.target)

	case ir.OAS:
		n := n.(*ir.AssignStmt)
		if n.X == n.Y && n.X.Op() == ir.ONAME {
			// An x=x assignment. No point in doing anything
			// here. In addition, skipping this assignment
			// prevents generating:
			//   VARDEF x
			//   COPY x -> x
			// which is bad because x is incorrectly considered
			// dead before the vardef. See issue #14904.
			return
		}

		// mayOverlap keeps track of whether the LHS and RHS might
		// refer to partially overlapping memory. Partial overlap can
		// only happen for arrays, see the comment in moveWhichMayOverlap.
		//
		// If both sides of the assignment are not dereferences, then
		// partial overlap can't happen: the arrays referenced would have
		// to be strictly smaller parts of the same base array, and a full
		// array on either side is either disjoint or identical.
		mayOverlap := n.X.Op() == ir.ODEREF && (n.Y != nil && n.Y.Op() == ir.ODEREF)
		if n.Y != nil && n.Y.Op() == ir.ODEREF {
			p := n.Y.(*ir.StarExpr).X
			for p.Op() == ir.OCONVNOP {
				p = p.(*ir.ConvExpr).X
			}
			if p.Op() == ir.OSPTR && p.(*ir.UnaryExpr).X.Type().IsString() {
				// Pointers into strings never overlap with a
				// destination we are writing to, as strings are
				// immutable.
				mayOverlap = false
			}
		}

		// Evaluate RHS.
		rhs := n.Y
		if rhs != nil {
			switch rhs.Op() {
			case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
				// All literals with nonzero fields have already been
				// rewritten during walk. Any that remain are just T{}
				// or equivalents. Use the zero value.
				if !ir.IsZero(rhs) {
					s.Fatalf("literal with nonzero value in SSA: %v", rhs)
				}
				rhs = nil
			case ir.OAPPEND:
				rhs := rhs.(*ir.CallExpr)
				// Check whether we're writing the result of an append
				// back to the same slice. If so, we handle it specially
				// to avoid write barriers on the fast (non-growth) path.
				if !ir.SameSafeExpr(n.X, rhs.Args[0]) || base.Flag.N != 0 {
					break
				}
				// If the slice can be SSA'd, it'll be on the stack,
				// so there will be no write barriers,
				// so there's no need to attempt to prevent them.
				if s.canSSA(n.X) {
					if base.Debug.Append > 0 { // replicating old diagnostic message
						base.WarnfAt(n.Pos(), "append: len-only update (in local slice)")
					}
					break
				}
				if base.Debug.Append > 0 {
					base.WarnfAt(n.Pos(), "append: len-only update")
				}
				s.append(rhs, true)
				return
			}
		}

		if ir.IsBlank(n.X) {
			// _ = rhs
			// Just evaluate rhs for side-effects.
			if rhs != nil {
				s.expr(rhs)
			}
			return
		}

		var t *types.Type
		if n.Y != nil {
			t = n.Y.Type()
		} else {
			t = n.X.Type()
		}

		var r *ssa.Value
		deref := !ssa.CanSSA(t)
		if deref {
			if rhs == nil {
				r = nil // Signal assign to use memclr.
			} else {
				r = s.addr(rhs)
			}
		} else {
			if rhs == nil {
				r = s.zeroVal(t)
			} else {
				r = s.expr(rhs)
			}
		}

		var skip skipMask
		if rhs != nil && (rhs.Op() == ir.OSLICE || rhs.Op() == ir.OSLICE3 || rhs.Op() == ir.OSLICESTR) && ir.SameSafeExpr(rhs.(*ir.SliceExpr).X, n.X) {
			// We're assigning a slicing operation back to its source.
			// Don't write back fields we aren't changing. See issue #14855.
			rhs := rhs.(*ir.SliceExpr)
			i, j, k := rhs.Low, rhs.High, rhs.Max
			if i != nil && (i.Op() == ir.OLITERAL && i.Val().Kind() == constant.Int && ir.Int64Val(i) == 0) {
				// [0:...] is the same as [:...]
				i = nil
			}
			// TODO: detect defaults for len/cap also?
			// Currently this doesn't work because (*p)[:len(*p)] appears
			// here as a temporary: tmp = len(*p); (*p)[:tmp].
			if i == nil {
				skip |= skipPtr
				if j == nil {
					skip |= skipLen
				}
				if k == nil {
					skip |= skipCap
				}
			}
		}

		s.assignWhichMayOverlap(n.X, r, deref, skip, mayOverlap)

	case ir.OIF:
		n := n.(*ir.IfStmt)
		if ir.IsConst(n.Cond, constant.Bool) {
			s.stmtList(n.Cond.Init())
			if ir.BoolVal(n.Cond) {
				s.stmtList(n.Body)
			} else {
				s.stmtList(n.Else)
			}
			break
		}

		bEnd := s.f.NewBlock(ssa.BlockPlain)
		var likely int8
		if n.Likely {
			likely = 1
		}
		var bThen *ssa.Block
		if len(n.Body) != 0 {
			bThen = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bThen = bEnd
		}
		var bElse *ssa.Block
		if len(n.Else) != 0 {
			bElse = s.f.NewBlock(ssa.BlockPlain)
		} else {
			bElse = bEnd
		}
		s.condBranch(n.Cond, bThen, bElse, likely)

		if len(n.Body) != 0 {
			s.startBlock(bThen)
			s.stmtList(n.Body)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		if len(n.Else) != 0 {
			s.startBlock(bElse)
			s.stmtList(n.Else)
			if b := s.endBlock(); b != nil {
				b.AddEdgeTo(bEnd)
			}
		}
		s.startBlock(bEnd)

	case ir.ORETURN:
		n := n.(*ir.ReturnStmt)
		s.stmtList(n.Results)
		b := s.exit()
		b.Pos = s.lastPos.WithIsStmt()

	case ir.OTAILCALL:
		n := n.(*ir.TailCallStmt)
		s.callResult(n.Call, callTail)
		call := s.mem()
		b := s.endBlock()
		b.Kind = ssa.BlockRetJmp
		b.SetControl(call)

	case ir.OCONTINUE, ir.OBREAK:
		n := n.(*ir.BranchStmt)
		var to *ssa.Block
		if n.Label == nil {
			// plain break/continue
			switch n.Op() {
			case ir.OCONTINUE:
				to = s.continueTo
			case ir.OBREAK:
				to = s.breakTo
			}
		} else {
			// labeled break/continue; look up the target
			sym := n.Label
			lab := s.label(sym)
			switch n.Op() {
			case ir.OCONTINUE:
				to = lab.continueTarget
			case ir.OBREAK:
				to = lab.breakTarget
			}
		}

		b := s.endBlock()
		b.Pos = s.lastPos.WithIsStmt() // Do this even if b is an empty block.
		b.AddEdgeTo(to)

	case ir.OFOR:
		// OFOR: for Ninit; Cond; Post { Body }
		// cond (Cond); body (Body); incr (Post)
		n := n.(*ir.ForStmt)
		base.Assert(!n.DistinctVars) // Should all be rewritten before escape analysis.
		bCond := s.f.NewBlock(ssa.BlockPlain)
		bBody := s.f.NewBlock(ssa.BlockPlain)
		bIncr := s.f.NewBlock(ssa.BlockPlain)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// ensure empty for loops have correct position; issue #30167
		bBody.Pos = n.Pos()

		// first, jump to condition test
		b := s.endBlock()
		b.AddEdgeTo(bCond)

		// generate code to test condition
		s.startBlock(bCond)
		if n.Cond != nil {
			s.condBranch(n.Cond, bBody, bEnd, 1)
		} else {
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(bBody)
		}

		// set up for continue/break in body
		prevContinue := s.continueTo
		prevBreak := s.breakTo
		s.continueTo = bIncr
		s.breakTo = bEnd
		var lab *ssaLabel
		if sym := n.Label; sym != nil {
			// labeled for loop
			lab = s.label(sym)
			lab.continueTarget = bIncr
			lab.breakTarget = bEnd
		}

		// generate body
		s.startBlock(bBody)
		s.stmtList(n.Body)

		// tear down continue/break
		s.continueTo = prevContinue
		s.breakTo = prevBreak
		if lab != nil {
			lab.continueTarget = nil
			lab.breakTarget = nil
		}

		// done with body, goto incr
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bIncr)
		}

		// generate incr
		s.startBlock(bIncr)
		if n.Post != nil {
			s.stmt(n.Post)
		}
		if b := s.endBlock(); b != nil {
			b.AddEdgeTo(bCond)
			// It can happen that bIncr ends in a block containing only VARKILL,
			// and that muddles the debugging experience.
			if b.Pos == src.NoXPos {
				b.Pos = bCond.Pos
			}
		}

		s.startBlock(bEnd)

	case ir.OSWITCH, ir.OSELECT:
		// These have been mostly rewritten by the compiler front end.
		// Our main task is to correctly hook up any break statements.
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		prevBreak := s.breakTo
		s.breakTo = bEnd
		var sym *types.Sym
		var body ir.Nodes
		if n.Op() == ir.OSWITCH {
			n := n.(*ir.SwitchStmt)
			sym = n.Label
			body = n.Compiled
		} else {
			n := n.(*ir.SelectStmt)
			sym = n.Label
			body = n.Compiled
		}

		var lab *ssaLabel
		if sym != nil {
			// labeled
			lab = s.label(sym)
			lab.breakTarget = bEnd
		}

		// generate body code
		s.stmtList(body)

		s.breakTo = prevBreak
		if lab != nil {
			lab.breakTarget = nil
		}

		// walk adds explicit OBREAK nodes to the end of all reachable code paths.
		// If we still have a current block here, then mark it unreachable.
		if s.curBlock != nil {
			m := s.mem()
			b := s.endBlock()
			b.Kind = ssa.BlockExit
			b.SetControl(m)
		}
		s.startBlock(bEnd)

	case ir.OJUMPTABLE:
		n := n.(*ir.JumpTableStmt)

		// Make blocks we'll need.
		jt := s.f.NewBlock(ssa.BlockJumpTable)
		bEnd := s.f.NewBlock(ssa.BlockPlain)

		// The only thing that needs evaluating is the index we're looking up.
		idx := s.expr(n.Idx)
		unsigned := idx.Type.IsUnsigned()

		// Extend so we can do everything in uintptr arithmetic.
		t := types.Types[types.TUINTPTR]
		idx = s.conv(nil, idx, idx.Type, t)

		// The ending condition for the current block decides whether we'll use
		// the jump table at all.
		// We check that min <= idx <= max and jump around the jump table
		// if that test fails.
		// We implement min <= idx <= max with 0 <= idx-min <= max-min, because
		// we'll need idx-min anyway as the control value for the jump table.
		var min, max uint64
		if unsigned {
			min, _ = constant.Uint64Val(n.Cases[0])
			max, _ = constant.Uint64Val(n.Cases[len(n.Cases)-1])
		} else {
			mn, _ := constant.Int64Val(n.Cases[0])
			mx, _ := constant.Int64Val(n.Cases[len(n.Cases)-1])
			min = uint64(mn)
			max = uint64(mx)
		}
		// Compare idx-min with max-min, to see if we can use the jump table.
		idx = s.newValue2(s.ssaOp(ir.OSUB, t), t, idx, s.uintptrConstant(min))
		width := s.uintptrConstant(max - min)
		cmp := s.newValue2(s.ssaOp(ir.OLE, t), types.Types[types.TBOOL], idx, width)
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.AddEdgeTo(jt)   // in range - use jump table
		b.AddEdgeTo(bEnd) // out of range - no case in the jump table will trigger
		b.Likely = ssa.BranchLikely

		// Build jump table block.
		s.startBlock(jt)
		jt.Pos = n.Pos()
		if base.Flag.Cfg.SpectreIndex {
			idx = s.newValue2(ssa.OpSpectreSliceIndex, t, idx, width)
		}
		jt.SetControl(idx)

		// Figure out where we should go for each index in the table.
		table := make([]*ssa.Block, max-min+1)
		for i := range table {
			table[i] = bEnd // default target
		}
		for i := range n.Targets {
			c := n.Cases[i]
			lab := s.label(n.Targets[i])
			if lab.target == nil {
				lab.target = s.f.NewBlock(ssa.BlockPlain)
			}
			var val uint64
			if unsigned {
				val, _ = constant.Uint64Val(c)
			} else {
				vl, _ := constant.Int64Val(c)
				val = uint64(vl)
			}
			// Overwrite the default target.
			table[val-min] = lab.target
		}
		for _, t := range table {
			jt.AddEdgeTo(t)
		}
		s.endBlock()

		s.startBlock(bEnd)

	case ir.OINTERFACESWITCH:
		n := n.(*ir.InterfaceSwitchStmt)
		typs := s.f.Config.Types

		t := s.expr(n.RuntimeType)
		h := s.expr(n.Hash)
		d := s.newValue1A(ssa.OpAddr, typs.BytePtr, n.Descriptor, s.sb)

		// Check the cache first.
		var merge *ssa.Block
		if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
			// Note: we can only use the cache if we have the right atomic load instruction.
			// Double-check that here.
			if _, ok := intrinsics[intrinsicKey{Arch.LinkArch.Arch, "runtime/internal/atomic", "Loadp"}]; !ok {
				s.Fatalf("atomic load not available")
			}
			merge = s.f.NewBlock(ssa.BlockPlain)
			cacheHit := s.f.NewBlock(ssa.BlockPlain)
			cacheMiss := s.f.NewBlock(ssa.BlockPlain)
			loopHead := s.f.NewBlock(ssa.BlockPlain)
			loopBody := s.f.NewBlock(ssa.BlockPlain)

			// Pick right size ops.
			var mul, and, add, zext ssa.Op
			if s.config.PtrSize == 4 {
				mul = ssa.OpMul32
				and = ssa.OpAnd32
				add = ssa.OpAdd32
				zext = ssa.OpCopy
			} else {
				mul = ssa.OpMul64
				and = ssa.OpAnd64
				add = ssa.OpAdd64
				zext = ssa.OpZeroExt32to64
			}

			// Load cache pointer out of descriptor, with an atomic load so
			// we ensure that we see a fully written cache.
			atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
			cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
			s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)

			// Initialize hash variable.
			s.vars[hashVar] = s.newValue1(zext, typs.Uintptr, h)

			// Load mask from cache.
			mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
			// Jump to loop head.
			b := s.endBlock()
			b.AddEdgeTo(loopHead)

			// At loop head, get pointer to the cache entry.
			//   e := &cache.Entries[hash&mask]
			s.startBlock(loopHead)
			entries := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, s.uintptrConstant(uint64(s.config.PtrSize)))
			idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
			idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(3*s.config.PtrSize)))
			e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, entries, idx)
			// hash++
			s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))

			// Look for a cache hit.
			//   if e.Typ == t { goto hit }
			eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
			cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, t, eTyp)
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp1)
			b.AddEdgeTo(cacheHit)
			b.AddEdgeTo(loopBody)

			// Look for an empty entry, the tombstone for this hash table.
			//   if e.Typ == nil { goto miss }
			s.startBlock(loopBody)
			cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
			b = s.endBlock()
			b.Kind = ssa.BlockIf
			b.SetControl(cmp2)
			b.AddEdgeTo(cacheMiss)
			b.AddEdgeTo(loopHead)

			// On a hit, load the data fields of the cache entry.
			//   Case = e.Case
			//   Itab = e.Itab
			s.startBlock(cacheHit)
			eCase := s.newValue2(ssa.OpLoad, typs.Int, s.newValue1I(ssa.OpOffPtr, typs.IntPtr, s.config.PtrSize, e), s.mem())
			eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, 2*s.config.PtrSize, e), s.mem())
			s.assign(n.Case, eCase, false, 0)
			s.assign(n.Itab, eItab, false, 0)
			b = s.endBlock()
			b.AddEdgeTo(merge)

			// On a miss, call into the runtime to get the answer.
			s.startBlock(cacheMiss)
		}

		r := s.rtcall(ir.Syms.InterfaceSwitch, true, []*types.Type{typs.Int, typs.BytePtr}, d, t)
		s.assign(n.Case, r[0], false, 0)
		s.assign(n.Itab, r[1], false, 0)

		if merge != nil {
			// Cache hits merge in here.
			b := s.endBlock()
			b.Kind = ssa.BlockPlain
			b.AddEdgeTo(merge)
			s.startBlock(merge)
		}

	case ir.OCHECKNIL:
		n := n.(*ir.UnaryExpr)
		p := s.expr(n.X)
		_ = s.nilCheck(p)

	case ir.OINLMARK:
		n := n.(*ir.InlineMarkStmt)
		s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Index, s.mem())

	default:
		s.Fatalf("unhandled stmt %v", n.Op())
	}
}
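// If true, share as many open-coded defer exits as possible (with the
// downside of worse line-number information).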
const shareDeferExits = false

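// exit processes any code that needs to be generated just before returning.
// It returns a BlockRet block that ends the control flow. Its control value
// will be set to the final memory state.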
2129 func (s *state) exit() *ssa.Block {
2130 if s.hasdefer {
2131 if s.hasOpenDefers {
2132 if shareDeferExits && s.lastDeferExit != nil && len(s.openDefers) == s.lastDeferCount {
2133 if s.curBlock.Kind != ssa.BlockPlain {
2134 panic("Block for an exit should be BlockPlain")
2135 }
2136 s.curBlock.AddEdgeTo(s.lastDeferExit)
2137 s.endBlock()
2138 return s.lastDeferFinalBlock
2139 }
2140 s.openDeferExit()
2141 } else {
2142 s.rtcall(ir.Syms.Deferreturn, true, nil)
2143 }
2144 }
2145
2146 // Do actual return.
2147 // These currently turn into self-copies (in many cases).
2148 resultFields := s.curfn.Type().Results()
2149 results := make([]*ssa.Value, len(resultFields)+1, len(resultFields)+1)
2150 // Collect each result value, from an SSA variable, the heap, or the stack.
2151 for i, f := range resultFields {
2152 n := f.Nname.(*ir.Name)
2153 if s.canSSA(n) {
2154 if !n.IsOutputParamInRegisters() && n.Type().HasPointers() {
2155 // We are about to store to the result slot.
2156 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
2157 }
2158 results[i] = s.variable(n, n.Type())
2159 } else if !n.OnStack() {
2160 // We are about to copy the in-heap result to the result slot.
2161 if n.Type().HasPointers() {
2162 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, n, s.mem())
2163 }
2164 ha := s.expr(n.Heapaddr)
2165 s.instrumentFields(n.Type(), ha, instrumentRead)
2166 results[i] = s.newValue2(ssa.OpDereference, n.Type(), ha, s.mem())
2167 } else {
2168 // Result is not SSA-able and lives on the stack: this is a self-move,
2169 // home=dest. No VarDef is issued, as the result slot already holds the
2170 // live value.
2171 results[i] = s.newValue2(ssa.OpDereference, n.Type(), s.addr(n), s.mem())
2172 }
2173 }
2174
2175 // In -race mode, we need to call racefuncexit.
2176 // Note: This has to happen after we load any heap-allocated results,
2177 // otherwise races on those loads would be attributed to the wrong place.
2178 if s.instrumentEnterExit {
2179 s.rtcall(ir.Syms.Racefuncexit, true, nil)
2180 }
2181
2182 results[len(results)-1] = s.mem()
2183 m := s.newValue0(ssa.OpMakeResult, s.f.OwnAux.LateExpansionResultType())
2184 m.AddArgs(results...)
2185
2186 b := s.endBlock()
2187 b.Kind = ssa.BlockRet
2188 b.SetControl(m)
2189 if s.hasdefer && s.hasOpenDefers {
2190 s.lastDeferFinalBlock = b
2191 }
2192 return b
2193 }
2194
2195 type opAndType struct {
2196 op ir.Op
2197 etype types.Kind
2198 }
2199
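// opToSSA maps an IR operator and a concrete operand kind to the SSA opcode
// implementing it. Platform-sized kinds (TINT, TUINT, TUINTPTR) are resolved
// to a concrete width by concreteEtype before lookup.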
2200 var opToSSA = map[opAndType]ssa.Op{
2201 {ir.OADD, types.TINT8}: ssa.OpAdd8,
2202 {ir.OADD, types.TUINT8}: ssa.OpAdd8,
2203 {ir.OADD, types.TINT16}: ssa.OpAdd16,
2204 {ir.OADD, types.TUINT16}: ssa.OpAdd16,
2205 {ir.OADD, types.TINT32}: ssa.OpAdd32,
2206 {ir.OADD, types.TUINT32}: ssa.OpAdd32,
2207 {ir.OADD, types.TINT64}: ssa.OpAdd64,
2208 {ir.OADD, types.TUINT64}: ssa.OpAdd64,
2209 {ir.OADD, types.TFLOAT32}: ssa.OpAdd32F,
2210 {ir.OADD, types.TFLOAT64}: ssa.OpAdd64F,
2211
2212 {ir.OSUB, types.TINT8}: ssa.OpSub8,
2213 {ir.OSUB, types.TUINT8}: ssa.OpSub8,
2214 {ir.OSUB, types.TINT16}: ssa.OpSub16,
2215 {ir.OSUB, types.TUINT16}: ssa.OpSub16,
2216 {ir.OSUB, types.TINT32}: ssa.OpSub32,
2217 {ir.OSUB, types.TUINT32}: ssa.OpSub32,
2218 {ir.OSUB, types.TINT64}: ssa.OpSub64,
2219 {ir.OSUB, types.TUINT64}: ssa.OpSub64,
2220 {ir.OSUB, types.TFLOAT32}: ssa.OpSub32F,
2221 {ir.OSUB, types.TFLOAT64}: ssa.OpSub64F,
2222
2223 {ir.ONOT, types.TBOOL}: ssa.OpNot,
2224
2225 {ir.ONEG, types.TINT8}: ssa.OpNeg8,
2226 {ir.ONEG, types.TUINT8}: ssa.OpNeg8,
2227 {ir.ONEG, types.TINT16}: ssa.OpNeg16,
2228 {ir.ONEG, types.TUINT16}: ssa.OpNeg16,
2229 {ir.ONEG, types.TINT32}: ssa.OpNeg32,
2230 {ir.ONEG, types.TUINT32}: ssa.OpNeg32,
2231 {ir.ONEG, types.TINT64}: ssa.OpNeg64,
2232 {ir.ONEG, types.TUINT64}: ssa.OpNeg64,
2233 {ir.ONEG, types.TFLOAT32}: ssa.OpNeg32F,
2234 {ir.ONEG, types.TFLOAT64}: ssa.OpNeg64F,
2235
2236 {ir.OBITNOT, types.TINT8}: ssa.OpCom8,
2237 {ir.OBITNOT, types.TUINT8}: ssa.OpCom8,
2238 {ir.OBITNOT, types.TINT16}: ssa.OpCom16,
2239 {ir.OBITNOT, types.TUINT16}: ssa.OpCom16,
2240 {ir.OBITNOT, types.TINT32}: ssa.OpCom32,
2241 {ir.OBITNOT, types.TUINT32}: ssa.OpCom32,
2242 {ir.OBITNOT, types.TINT64}: ssa.OpCom64,
2243 {ir.OBITNOT, types.TUINT64}: ssa.OpCom64,
2244
2245 {ir.OIMAG, types.TCOMPLEX64}: ssa.OpComplexImag,
2246 {ir.OIMAG, types.TCOMPLEX128}: ssa.OpComplexImag,
2247 {ir.OREAL, types.TCOMPLEX64}: ssa.OpComplexReal,
2248 {ir.OREAL, types.TCOMPLEX128}: ssa.OpComplexReal,
2249
2250 {ir.OMUL, types.TINT8}: ssa.OpMul8,
2251 {ir.OMUL, types.TUINT8}: ssa.OpMul8,
2252 {ir.OMUL, types.TINT16}: ssa.OpMul16,
2253 {ir.OMUL, types.TUINT16}: ssa.OpMul16,
2254 {ir.OMUL, types.TINT32}: ssa.OpMul32,
2255 {ir.OMUL, types.TUINT32}: ssa.OpMul32,
2256 {ir.OMUL, types.TINT64}: ssa.OpMul64,
2257 {ir.OMUL, types.TUINT64}: ssa.OpMul64,
2258 {ir.OMUL, types.TFLOAT32}: ssa.OpMul32F,
2259 {ir.OMUL, types.TFLOAT64}: ssa.OpMul64F,
2260
2261 {ir.ODIV, types.TFLOAT32}: ssa.OpDiv32F,
2262 {ir.ODIV, types.TFLOAT64}: ssa.OpDiv64F,
2263
2264 {ir.ODIV, types.TINT8}: ssa.OpDiv8,
2265 {ir.ODIV, types.TUINT8}: ssa.OpDiv8u,
2266 {ir.ODIV, types.TINT16}: ssa.OpDiv16,
2267 {ir.ODIV, types.TUINT16}: ssa.OpDiv16u,
2268 {ir.ODIV, types.TINT32}: ssa.OpDiv32,
2269 {ir.ODIV, types.TUINT32}: ssa.OpDiv32u,
2270 {ir.ODIV, types.TINT64}: ssa.OpDiv64,
2271 {ir.ODIV, types.TUINT64}: ssa.OpDiv64u,
2272
2273 {ir.OMOD, types.TINT8}: ssa.OpMod8,
2274 {ir.OMOD, types.TUINT8}: ssa.OpMod8u,
2275 {ir.OMOD, types.TINT16}: ssa.OpMod16,
2276 {ir.OMOD, types.TUINT16}: ssa.OpMod16u,
2277 {ir.OMOD, types.TINT32}: ssa.OpMod32,
2278 {ir.OMOD, types.TUINT32}: ssa.OpMod32u,
2279 {ir.OMOD, types.TINT64}: ssa.OpMod64,
2280 {ir.OMOD, types.TUINT64}: ssa.OpMod64u,
2281
2282 {ir.OAND, types.TINT8}: ssa.OpAnd8,
2283 {ir.OAND, types.TUINT8}: ssa.OpAnd8,
2284 {ir.OAND, types.TINT16}: ssa.OpAnd16,
2285 {ir.OAND, types.TUINT16}: ssa.OpAnd16,
2286 {ir.OAND, types.TINT32}: ssa.OpAnd32,
2287 {ir.OAND, types.TUINT32}: ssa.OpAnd32,
2288 {ir.OAND, types.TINT64}: ssa.OpAnd64,
2289 {ir.OAND, types.TUINT64}: ssa.OpAnd64,
2290
2291 {ir.OOR, types.TINT8}: ssa.OpOr8,
2292 {ir.OOR, types.TUINT8}: ssa.OpOr8,
2293 {ir.OOR, types.TINT16}: ssa.OpOr16,
2294 {ir.OOR, types.TUINT16}: ssa.OpOr16,
2295 {ir.OOR, types.TINT32}: ssa.OpOr32,
2296 {ir.OOR, types.TUINT32}: ssa.OpOr32,
2297 {ir.OOR, types.TINT64}: ssa.OpOr64,
2298 {ir.OOR, types.TUINT64}: ssa.OpOr64,
2299
2300 {ir.OXOR, types.TINT8}: ssa.OpXor8,
2301 {ir.OXOR, types.TUINT8}: ssa.OpXor8,
2302 {ir.OXOR, types.TINT16}: ssa.OpXor16,
2303 {ir.OXOR, types.TUINT16}: ssa.OpXor16,
2304 {ir.OXOR, types.TINT32}: ssa.OpXor32,
2305 {ir.OXOR, types.TUINT32}: ssa.OpXor32,
2306 {ir.OXOR, types.TINT64}: ssa.OpXor64,
2307 {ir.OXOR, types.TUINT64}: ssa.OpXor64,
2308
2309 {ir.OEQ, types.TBOOL}: ssa.OpEqB,
2310 {ir.OEQ, types.TINT8}: ssa.OpEq8,
2311 {ir.OEQ, types.TUINT8}: ssa.OpEq8,
2312 {ir.OEQ, types.TINT16}: ssa.OpEq16,
2313 {ir.OEQ, types.TUINT16}: ssa.OpEq16,
2314 {ir.OEQ, types.TINT32}: ssa.OpEq32,
2315 {ir.OEQ, types.TUINT32}: ssa.OpEq32,
2316 {ir.OEQ, types.TINT64}: ssa.OpEq64,
2317 {ir.OEQ, types.TUINT64}: ssa.OpEq64,
2318 {ir.OEQ, types.TINTER}: ssa.OpEqInter,
2319 {ir.OEQ, types.TSLICE}: ssa.OpEqSlice,
2320 {ir.OEQ, types.TFUNC}: ssa.OpEqPtr,
2321 {ir.OEQ, types.TMAP}: ssa.OpEqPtr,
2322 {ir.OEQ, types.TCHAN}: ssa.OpEqPtr,
2323 {ir.OEQ, types.TPTR}: ssa.OpEqPtr,
2324 {ir.OEQ, types.TUINTPTR}: ssa.OpEqPtr,
2325 {ir.OEQ, types.TUNSAFEPTR}: ssa.OpEqPtr,
2326 {ir.OEQ, types.TFLOAT64}: ssa.OpEq64F,
2327 {ir.OEQ, types.TFLOAT32}: ssa.OpEq32F,
2328
2329 {ir.ONE, types.TBOOL}: ssa.OpNeqB,
2330 {ir.ONE, types.TINT8}: ssa.OpNeq8,
2331 {ir.ONE, types.TUINT8}: ssa.OpNeq8,
2332 {ir.ONE, types.TINT16}: ssa.OpNeq16,
2333 {ir.ONE, types.TUINT16}: ssa.OpNeq16,
2334 {ir.ONE, types.TINT32}: ssa.OpNeq32,
2335 {ir.ONE, types.TUINT32}: ssa.OpNeq32,
2336 {ir.ONE, types.TINT64}: ssa.OpNeq64,
2337 {ir.ONE, types.TUINT64}: ssa.OpNeq64,
2338 {ir.ONE, types.TINTER}: ssa.OpNeqInter,
2339 {ir.ONE, types.TSLICE}: ssa.OpNeqSlice,
2340 {ir.ONE, types.TFUNC}: ssa.OpNeqPtr,
2341 {ir.ONE, types.TMAP}: ssa.OpNeqPtr,
2342 {ir.ONE, types.TCHAN}: ssa.OpNeqPtr,
2343 {ir.ONE, types.TPTR}: ssa.OpNeqPtr,
2344 {ir.ONE, types.TUINTPTR}: ssa.OpNeqPtr,
2345 {ir.ONE, types.TUNSAFEPTR}: ssa.OpNeqPtr,
2346 {ir.ONE, types.TFLOAT64}: ssa.OpNeq64F,
2347 {ir.ONE, types.TFLOAT32}: ssa.OpNeq32F,
2348
2349 {ir.OLT, types.TINT8}: ssa.OpLess8,
2350 {ir.OLT, types.TUINT8}: ssa.OpLess8U,
2351 {ir.OLT, types.TINT16}: ssa.OpLess16,
2352 {ir.OLT, types.TUINT16}: ssa.OpLess16U,
2353 {ir.OLT, types.TINT32}: ssa.OpLess32,
2354 {ir.OLT, types.TUINT32}: ssa.OpLess32U,
2355 {ir.OLT, types.TINT64}: ssa.OpLess64,
2356 {ir.OLT, types.TUINT64}: ssa.OpLess64U,
2357 {ir.OLT, types.TFLOAT64}: ssa.OpLess64F,
2358 {ir.OLT, types.TFLOAT32}: ssa.OpLess32F,
2359
2360 {ir.OLE, types.TINT8}: ssa.OpLeq8,
2361 {ir.OLE, types.TUINT8}: ssa.OpLeq8U,
2362 {ir.OLE, types.TINT16}: ssa.OpLeq16,
2363 {ir.OLE, types.TUINT16}: ssa.OpLeq16U,
2364 {ir.OLE, types.TINT32}: ssa.OpLeq32,
2365 {ir.OLE, types.TUINT32}: ssa.OpLeq32U,
2366 {ir.OLE, types.TINT64}: ssa.OpLeq64,
2367 {ir.OLE, types.TUINT64}: ssa.OpLeq64U,
2368 {ir.OLE, types.TFLOAT64}: ssa.OpLeq64F,
2369 {ir.OLE, types.TFLOAT32}: ssa.OpLeq32F,
2370 }
2371
2372 func (s *state) concreteEtype(t *types.Type) types.Kind {
2373 e := t.Kind()
2374 switch e {
2375 default:
2376 return e
2377 case types.TINT:
2378 if s.config.PtrSize == 8 {
2379 return types.TINT64
2380 }
2381 return types.TINT32
2382 case types.TUINT:
2383 if s.config.PtrSize == 8 {
2384 return types.TUINT64
2385 }
2386 return types.TUINT32
2387 case types.TUINTPTR:
2388 if s.config.PtrSize == 8 {
2389 return types.TUINT64
2390 }
2391 return types.TUINT32
2392 }
2393 }
2394
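// ssaOp returns the SSA opcode that implements op for operands of type t,
// e.g. {ir.OADD, types.TINT32} selects ssa.OpAdd32.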
2395 func (s *state) ssaOp(op ir.Op, t *types.Type) ssa.Op {
2396 etype := s.concreteEtype(t)
2397 x, ok := opToSSA[opAndType{op, etype}]
2398 if !ok {
2399 s.Fatalf("unhandled binary op %v %s", op, etype)
2400 }
2401 return x
2402 }
2403
2404 type opAndTwoTypes struct {
2405 op ir.Op
2406 etype1 types.Kind
2407 etype2 types.Kind
2408 }
2409
2410 type twoTypes struct {
2411 etype1 types.Kind
2412 etype2 types.Kind
2413 }
2414
2415 type twoOpsAndType struct {
2416 op1 ssa.Op
2417 op2 ssa.Op
2418 intermediateType types.Kind
2419 }
2420
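// fpConvOpToSSA maps a (from, to) kind pair to the pair of SSA ops (plus an
// intermediate kind) that together implement the conversion. ssa.OpInvalid
// marks a half that has no single-op lowering and needs special expansion.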
2421 var fpConvOpToSSA = map[twoTypes]twoOpsAndType{
2422
2423 {types.TINT8, types.TFLOAT32}: {ssa.OpSignExt8to32, ssa.OpCvt32to32F, types.TINT32},
2424 {types.TINT16, types.TFLOAT32}: {ssa.OpSignExt16to32, ssa.OpCvt32to32F, types.TINT32},
2425 {types.TINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32to32F, types.TINT32},
2426 {types.TINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64to32F, types.TINT64},
2427
2428 {types.TINT8, types.TFLOAT64}: {ssa.OpSignExt8to32, ssa.OpCvt32to64F, types.TINT32},
2429 {types.TINT16, types.TFLOAT64}: {ssa.OpSignExt16to32, ssa.OpCvt32to64F, types.TINT32},
2430 {types.TINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32to64F, types.TINT32},
2431 {types.TINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64to64F, types.TINT64},
2432
2433 {types.TFLOAT32, types.TINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
2434 {types.TFLOAT32, types.TINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
2435 {types.TFLOAT32, types.TINT32}: {ssa.OpCvt32Fto32, ssa.OpCopy, types.TINT32},
2436 {types.TFLOAT32, types.TINT64}: {ssa.OpCvt32Fto64, ssa.OpCopy, types.TINT64},
2437
2438 {types.TFLOAT64, types.TINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
2439 {types.TFLOAT64, types.TINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
2440 {types.TFLOAT64, types.TINT32}: {ssa.OpCvt64Fto32, ssa.OpCopy, types.TINT32},
2441 {types.TFLOAT64, types.TINT64}: {ssa.OpCvt64Fto64, ssa.OpCopy, types.TINT64},
2442
2443 {types.TUINT8, types.TFLOAT32}: {ssa.OpZeroExt8to32, ssa.OpCvt32to32F, types.TINT32},
2444 {types.TUINT16, types.TFLOAT32}: {ssa.OpZeroExt16to32, ssa.OpCvt32to32F, types.TINT32},
2445 {types.TUINT32, types.TFLOAT32}: {ssa.OpZeroExt32to64, ssa.OpCvt64to32F, types.TINT64},
2446 {types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64}, // Cvt64Uto32F, branchy code expansion instead
2447
2448 {types.TUINT8, types.TFLOAT64}: {ssa.OpZeroExt8to32, ssa.OpCvt32to64F, types.TINT32},
2449 {types.TUINT16, types.TFLOAT64}: {ssa.OpZeroExt16to32, ssa.OpCvt32to64F, types.TINT32},
2450 {types.TUINT32, types.TFLOAT64}: {ssa.OpZeroExt32to64, ssa.OpCvt64to64F, types.TINT64},
2451 {types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpInvalid, types.TUINT64}, // Cvt64Uto64F, branchy code expansion instead
2452
2453 {types.TFLOAT32, types.TUINT8}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to8, types.TINT32},
2454 {types.TFLOAT32, types.TUINT16}: {ssa.OpCvt32Fto32, ssa.OpTrunc32to16, types.TINT32},
2455 {types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto64, ssa.OpTrunc64to32, types.TINT64},
2456 {types.TFLOAT32, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64}, // Cvt32Fto64U, branchy code expansion instead
2457
2458 {types.TFLOAT64, types.TUINT8}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to8, types.TINT32},
2459 {types.TFLOAT64, types.TUINT16}: {ssa.OpCvt64Fto32, ssa.OpTrunc32to16, types.TINT32},
2460 {types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto64, ssa.OpTrunc64to32, types.TINT64},
2461 {types.TFLOAT64, types.TUINT64}: {ssa.OpInvalid, ssa.OpCopy, types.TUINT64}, // Cvt64Fto64U, branchy code expansion instead
2462
2463 // float to float conversions
2464 {types.TFLOAT64, types.TFLOAT32}: {ssa.OpCvt64Fto32F, ssa.OpCopy, types.TFLOAT32},
2465 {types.TFLOAT64, types.TFLOAT64}: {ssa.OpRound64F, ssa.OpCopy, types.TFLOAT64},
2466 {types.TFLOAT32, types.TFLOAT32}: {ssa.OpRound32F, ssa.OpCopy, types.TFLOAT32},
2467 {types.TFLOAT32, types.TFLOAT64}: {ssa.OpCvt32Fto64F, ssa.OpCopy, types.TFLOAT64},
2468 }
2469
2470 // fpConvOpToSSA32 overrides entries above for 32-bit architectures (other
2471 // than MIPS and softfloat) that have direct uint32 <-> float instructions.
2472 var fpConvOpToSSA32 = map[twoTypes]twoOpsAndType{
2473 {types.TUINT32, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt32Uto32F, types.TUINT32},
2474 {types.TUINT32, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt32Uto64F, types.TUINT32},
2475 {types.TFLOAT32, types.TUINT32}: {ssa.OpCvt32Fto32U, ssa.OpCopy, types.TUINT32},
2476 {types.TFLOAT64, types.TUINT32}: {ssa.OpCvt64Fto32U, ssa.OpCopy, types.TUINT32},
2477 }
2478
2479 // uint64 <-> float conversions, only on machines that have instructions for that.
2480 var uint64fpConvOpToSSA = map[twoTypes]twoOpsAndType{
2481 {types.TUINT64, types.TFLOAT32}: {ssa.OpCopy, ssa.OpCvt64Uto32F, types.TUINT64},
2482 {types.TUINT64, types.TFLOAT64}: {ssa.OpCopy, ssa.OpCvt64Uto64F, types.TUINT64},
2483 {types.TFLOAT32, types.TUINT64}: {ssa.OpCvt32Fto64U, ssa.OpCopy, types.TUINT64},
2484 {types.TFLOAT64, types.TUINT64}: {ssa.OpCvt64Fto64U, ssa.OpCopy, types.TUINT64},
2485 }
2486
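// shiftOpToSSA maps a shift operator plus the kinds of the shifted value and
// the shift amount to the SSA shift opcode. Signed shift amounts are checked
// for negativity and then reinterpreted as unsigned (see OLSH/ORSH below).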
2487 var shiftOpToSSA = map[opAndTwoTypes]ssa.Op{
2488 {ir.OLSH, types.TINT8, types.TUINT8}: ssa.OpLsh8x8,
2489 {ir.OLSH, types.TUINT8, types.TUINT8}: ssa.OpLsh8x8,
2490 {ir.OLSH, types.TINT8, types.TUINT16}: ssa.OpLsh8x16,
2491 {ir.OLSH, types.TUINT8, types.TUINT16}: ssa.OpLsh8x16,
2492 {ir.OLSH, types.TINT8, types.TUINT32}: ssa.OpLsh8x32,
2493 {ir.OLSH, types.TUINT8, types.TUINT32}: ssa.OpLsh8x32,
2494 {ir.OLSH, types.TINT8, types.TUINT64}: ssa.OpLsh8x64,
2495 {ir.OLSH, types.TUINT8, types.TUINT64}: ssa.OpLsh8x64,
2496
2497 {ir.OLSH, types.TINT16, types.TUINT8}: ssa.OpLsh16x8,
2498 {ir.OLSH, types.TUINT16, types.TUINT8}: ssa.OpLsh16x8,
2499 {ir.OLSH, types.TINT16, types.TUINT16}: ssa.OpLsh16x16,
2500 {ir.OLSH, types.TUINT16, types.TUINT16}: ssa.OpLsh16x16,
2501 {ir.OLSH, types.TINT16, types.TUINT32}: ssa.OpLsh16x32,
2502 {ir.OLSH, types.TUINT16, types.TUINT32}: ssa.OpLsh16x32,
2503 {ir.OLSH, types.TINT16, types.TUINT64}: ssa.OpLsh16x64,
2504 {ir.OLSH, types.TUINT16, types.TUINT64}: ssa.OpLsh16x64,
2505
2506 {ir.OLSH, types.TINT32, types.TUINT8}: ssa.OpLsh32x8,
2507 {ir.OLSH, types.TUINT32, types.TUINT8}: ssa.OpLsh32x8,
2508 {ir.OLSH, types.TINT32, types.TUINT16}: ssa.OpLsh32x16,
2509 {ir.OLSH, types.TUINT32, types.TUINT16}: ssa.OpLsh32x16,
2510 {ir.OLSH, types.TINT32, types.TUINT32}: ssa.OpLsh32x32,
2511 {ir.OLSH, types.TUINT32, types.TUINT32}: ssa.OpLsh32x32,
2512 {ir.OLSH, types.TINT32, types.TUINT64}: ssa.OpLsh32x64,
2513 {ir.OLSH, types.TUINT32, types.TUINT64}: ssa.OpLsh32x64,
2514
2515 {ir.OLSH, types.TINT64, types.TUINT8}: ssa.OpLsh64x8,
2516 {ir.OLSH, types.TUINT64, types.TUINT8}: ssa.OpLsh64x8,
2517 {ir.OLSH, types.TINT64, types.TUINT16}: ssa.OpLsh64x16,
2518 {ir.OLSH, types.TUINT64, types.TUINT16}: ssa.OpLsh64x16,
2519 {ir.OLSH, types.TINT64, types.TUINT32}: ssa.OpLsh64x32,
2520 {ir.OLSH, types.TUINT64, types.TUINT32}: ssa.OpLsh64x32,
2521 {ir.OLSH, types.TINT64, types.TUINT64}: ssa.OpLsh64x64,
2522 {ir.OLSH, types.TUINT64, types.TUINT64}: ssa.OpLsh64x64,
2523
2524 {ir.ORSH, types.TINT8, types.TUINT8}: ssa.OpRsh8x8,
2525 {ir.ORSH, types.TUINT8, types.TUINT8}: ssa.OpRsh8Ux8,
2526 {ir.ORSH, types.TINT8, types.TUINT16}: ssa.OpRsh8x16,
2527 {ir.ORSH, types.TUINT8, types.TUINT16}: ssa.OpRsh8Ux16,
2528 {ir.ORSH, types.TINT8, types.TUINT32}: ssa.OpRsh8x32,
2529 {ir.ORSH, types.TUINT8, types.TUINT32}: ssa.OpRsh8Ux32,
2530 {ir.ORSH, types.TINT8, types.TUINT64}: ssa.OpRsh8x64,
2531 {ir.ORSH, types.TUINT8, types.TUINT64}: ssa.OpRsh8Ux64,
2532
2533 {ir.ORSH, types.TINT16, types.TUINT8}: ssa.OpRsh16x8,
2534 {ir.ORSH, types.TUINT16, types.TUINT8}: ssa.OpRsh16Ux8,
2535 {ir.ORSH, types.TINT16, types.TUINT16}: ssa.OpRsh16x16,
2536 {ir.ORSH, types.TUINT16, types.TUINT16}: ssa.OpRsh16Ux16,
2537 {ir.ORSH, types.TINT16, types.TUINT32}: ssa.OpRsh16x32,
2538 {ir.ORSH, types.TUINT16, types.TUINT32}: ssa.OpRsh16Ux32,
2539 {ir.ORSH, types.TINT16, types.TUINT64}: ssa.OpRsh16x64,
2540 {ir.ORSH, types.TUINT16, types.TUINT64}: ssa.OpRsh16Ux64,
2541
2542 {ir.ORSH, types.TINT32, types.TUINT8}: ssa.OpRsh32x8,
2543 {ir.ORSH, types.TUINT32, types.TUINT8}: ssa.OpRsh32Ux8,
2544 {ir.ORSH, types.TINT32, types.TUINT16}: ssa.OpRsh32x16,
2545 {ir.ORSH, types.TUINT32, types.TUINT16}: ssa.OpRsh32Ux16,
2546 {ir.ORSH, types.TINT32, types.TUINT32}: ssa.OpRsh32x32,
2547 {ir.ORSH, types.TUINT32, types.TUINT32}: ssa.OpRsh32Ux32,
2548 {ir.ORSH, types.TINT32, types.TUINT64}: ssa.OpRsh32x64,
2549 {ir.ORSH, types.TUINT32, types.TUINT64}: ssa.OpRsh32Ux64,
2550
2551 {ir.ORSH, types.TINT64, types.TUINT8}: ssa.OpRsh64x8,
2552 {ir.ORSH, types.TUINT64, types.TUINT8}: ssa.OpRsh64Ux8,
2553 {ir.ORSH, types.TINT64, types.TUINT16}: ssa.OpRsh64x16,
2554 {ir.ORSH, types.TUINT64, types.TUINT16}: ssa.OpRsh64Ux16,
2555 {ir.ORSH, types.TINT64, types.TUINT32}: ssa.OpRsh64x32,
2556 {ir.ORSH, types.TUINT64, types.TUINT32}: ssa.OpRsh64Ux32,
2557 {ir.ORSH, types.TINT64, types.TUINT64}: ssa.OpRsh64x64,
2558 {ir.ORSH, types.TUINT64, types.TUINT64}: ssa.OpRsh64Ux64,
2559 }
2560
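// ssaShiftOp returns the SSA opcode for shift operation op, where t is the
// type of the shifted value and u is the type of the shift amount.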
2561 func (s *state) ssaShiftOp(op ir.Op, t *types.Type, u *types.Type) ssa.Op {
2562 etype1 := s.concreteEtype(t)
2563 etype2 := s.concreteEtype(u)
2564 x, ok := shiftOpToSSA[opAndTwoTypes{op, etype1, etype2}]
2565 if !ok {
2566 s.Fatalf("unhandled shift op %v etype=%s/%s", op, etype1, etype2)
2567 }
2568 return x
2569 }
2570
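// uintptrConstant returns v as a constant of uintptr type, using a 32-bit or
// 64-bit constant op to match the target's pointer size.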
2571 func (s *state) uintptrConstant(v uint64) *ssa.Value {
2572 if s.config.PtrSize == 4 {
2573 return s.newValue0I(ssa.OpConst32, types.Types[types.TUINTPTR], int64(v))
2574 }
2575 return s.newValue0I(ssa.OpConst64, types.Types[types.TUINTPTR], int64(v))
2576 }
2577
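// conv returns the result of converting value v, of type ft, to type tt;
// n is the expression being converted, used when helper control flow or
// recursive conversions are required.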
2578 func (s *state) conv(n ir.Node, v *ssa.Value, ft, tt *types.Type) *ssa.Value {
2579 if ft.IsBoolean() && tt.IsKind(types.TUINT8) {
2580 // Bool -> uint8 is generated internally when indexing into runtime.staticbytes.
2581 return s.newValue1(ssa.OpCvtBoolToUint8, tt, v)
2582 }
2583 if ft.IsInteger() && tt.IsInteger() {
2584 var op ssa.Op
2585 if tt.Size() == ft.Size() {
2586 op = ssa.OpCopy
2587 } else if tt.Size() < ft.Size() {
2588 // truncation
2589 switch 10*ft.Size() + tt.Size() {
2590 case 21:
2591 op = ssa.OpTrunc16to8
2592 case 41:
2593 op = ssa.OpTrunc32to8
2594 case 42:
2595 op = ssa.OpTrunc32to16
2596 case 81:
2597 op = ssa.OpTrunc64to8
2598 case 82:
2599 op = ssa.OpTrunc64to16
2600 case 84:
2601 op = ssa.OpTrunc64to32
2602 default:
2603 s.Fatalf("weird integer truncation %v -> %v", ft, tt)
2604 }
2605 } else if ft.IsSigned() {
2606 // sign extension
2607 switch 10*ft.Size() + tt.Size() {
2608 case 12:
2609 op = ssa.OpSignExt8to16
2610 case 14:
2611 op = ssa.OpSignExt8to32
2612 case 18:
2613 op = ssa.OpSignExt8to64
2614 case 24:
2615 op = ssa.OpSignExt16to32
2616 case 28:
2617 op = ssa.OpSignExt16to64
2618 case 48:
2619 op = ssa.OpSignExt32to64
2620 default:
2621 s.Fatalf("bad integer sign extension %v -> %v", ft, tt)
2622 }
2623 } else {
2624 // zero extension
2625 switch 10*ft.Size() + tt.Size() {
2626 case 12:
2627 op = ssa.OpZeroExt8to16
2628 case 14:
2629 op = ssa.OpZeroExt8to32
2630 case 18:
2631 op = ssa.OpZeroExt8to64
2632 case 24:
2633 op = ssa.OpZeroExt16to32
2634 case 28:
2635 op = ssa.OpZeroExt16to64
2636 case 48:
2637 op = ssa.OpZeroExt32to64
2638 default:
2639 s.Fatalf("weird integer zero extension %v -> %v", ft, tt)
2640 }
2641 }
2642 return s.newValue1(op, tt, v)
2643 }
2644
2645 if ft.IsComplex() && tt.IsComplex() {
2646 var op ssa.Op
2647 if ft.Size() == tt.Size() {
2648 switch ft.Size() {
2649 case 8:
2650 op = ssa.OpRound32F
2651 case 16:
2652 op = ssa.OpRound64F
2653 default:
2654 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2655 }
2656 } else if ft.Size() == 8 && tt.Size() == 16 {
2657 op = ssa.OpCvt32Fto64F
2658 } else if ft.Size() == 16 && tt.Size() == 8 {
2659 op = ssa.OpCvt64Fto32F
2660 } else {
2661 s.Fatalf("weird complex conversion %v -> %v", ft, tt)
2662 }
2663 ftp := types.FloatForComplex(ft)
2664 ttp := types.FloatForComplex(tt)
2665 return s.newValue2(ssa.OpComplexMake, tt,
2666 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexReal, ftp, v)),
2667 s.newValueOrSfCall1(op, ttp, s.newValue1(ssa.OpComplexImag, ftp, v)))
2668 }
2669
2670 if tt.IsComplex() {
2671 // Needed for generics support - can't happen in normal Go code.
2672 et := types.FloatForComplex(tt)
2673 v = s.conv(n, v, ft, et)
2674 return s.newValue2(ssa.OpComplexMake, tt, v, s.zeroVal(et))
2675 }
2676
2677 if ft.IsFloat() || tt.IsFloat() {
2678 conv, ok := fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]
2679 if s.config.RegSize == 4 && Arch.LinkArch.Family != sys.MIPS && !s.softFloat {
2680 if conv1, ok1 := fpConvOpToSSA32[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2681 conv = conv1
2682 }
2683 }
2684 if Arch.LinkArch.Family == sys.ARM64 || Arch.LinkArch.Family == sys.Wasm || Arch.LinkArch.Family == sys.S390X || s.softFloat {
2685 if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
2686 conv = conv1
2687 }
2688 }
2689
2690 if Arch.LinkArch.Family == sys.MIPS && !s.softFloat {
2691 if ft.Size() == 4 && ft.IsInteger() && !ft.IsSigned() {
2692 // tricky 32-bit unsigned to float conversion; MIPS has no direct instruction, so use a helper.
2693 if tt.Size() == 4 {
2694 return s.uint32Tofloat32(n, v, ft, tt)
2695 }
2696 if tt.Size() == 8 {
2697 return s.uint32Tofloat64(n, v, ft, tt)
2698 }
2699 } else if tt.Size() == 4 && tt.IsInteger() && !tt.IsSigned() {
2700 // tricky float to 32-bit unsigned conversion; likewise lowered via a helper.
2701 if ft.Size() == 4 {
2702 return s.float32ToUint32(n, v, ft, tt)
2703 }
2704 if ft.Size() == 8 {
2705 return s.float64ToUint32(n, v, ft, tt)
2706 }
2707 }
2708 }
2709
2710 if !ok {
2711 s.Fatalf("weird float conversion %v -> %v", ft, tt)
2712 }
2713 op1, op2, it := conv.op1, conv.op2, conv.intermediateType
2714
2715 if op1 != ssa.OpInvalid && op2 != ssa.OpInvalid {
2716 // normal case, not tripping over unsigned 64
2717 if op1 == ssa.OpCopy {
2718 if op2 == ssa.OpCopy {
2719 return v
2720 }
2721 return s.newValueOrSfCall1(op2, tt, v)
2722 }
2723 if op2 == ssa.OpCopy {
2724 return s.newValueOrSfCall1(op1, tt, v)
2725 }
2726 return s.newValueOrSfCall1(op2, tt, s.newValueOrSfCall1(op1, types.Types[it], v))
2727 }
2728 // Tricky 64-bit unsigned cases.
2729 if ft.IsInteger() {
2730 // tt is float32 or float64, and ft is also unsigned
2731 if tt.Size() == 4 {
2732 return s.uint64Tofloat32(n, v, ft, tt)
2733 }
2734 if tt.Size() == 8 {
2735 return s.uint64Tofloat64(n, v, ft, tt)
2736 }
2737 s.Fatalf("weird unsigned integer to float conversion %v -> %v", ft, tt)
2738 }
2739 // ft is float32 or float64, and tt is unsigned integer
2740 if ft.Size() == 4 {
2741 return s.float32ToUint64(n, v, ft, tt)
2742 }
2743 if ft.Size() == 8 {
2744 return s.float64ToUint64(n, v, ft, tt)
2745 }
2746 s.Fatalf("weird float to unsigned integer conversion %v -> %v", ft, tt)
2747 return nil
2748 }
2749
2750 s.Fatalf("unhandled OCONV %s -> %s", ft.Kind(), tt.Kind())
2751 return nil
2752 }
2753
2754 // expr converts the expression n to ssa, adds it to s and returns the ssa result.
2755 func (s *state) expr(n ir.Node) *ssa.Value {
2756 return s.exprCheckPtr(n, true)
2757 }
2758
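// exprCheckPtr is like expr, except that when checkPtrOK is false it skips
// checkptr alignment instrumentation for unsafe.Pointer-to-*T conversions,
// letting the caller instrument the enclosing operation instead.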
2759 func (s *state) exprCheckPtr(n ir.Node, checkPtrOK bool) *ssa.Value {
2760 if ir.HasUniquePos(n) {
2761 // ONAMEs and named OLITERALs have the line number
2762 // of the decl, not the use. See issue 14742.
2763 s.pushLine(n.Pos())
2764 defer s.popLine()
2765 }
2766
2767 s.stmtList(n.Init())
2768 switch n.Op() {
2769 case ir.OBYTES2STRTMP:
2770 n := n.(*ir.ConvExpr)
2771 slice := s.expr(n.X)
2772 ptr := s.newValue1(ssa.OpSlicePtr, s.f.Config.Types.BytePtr, slice)
2773 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
2774 return s.newValue2(ssa.OpStringMake, n.Type(), ptr, len)
2775 case ir.OSTR2BYTESTMP:
2776 n := n.(*ir.ConvExpr)
2777 str := s.expr(n.X)
2778 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, str)
2779 if !n.NonNil() {
2780 // We need to ensure []byte("") evaluates to []byte{}, and not []byte(nil).
2781 //
2782 // TODO(mdempsky): Investigate using "len != 0" instead of "ptr != nil".
2783 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], ptr, s.constNil(ptr.Type))
2784 zerobase := s.newValue1A(ssa.OpAddr, ptr.Type, ir.Syms.Zerobase, s.sb)
2785 ptr = s.ternary(cond, ptr, zerobase)
2786 }
2787 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], str)
2788 return s.newValue3(ssa.OpSliceMake, n.Type(), ptr, len, len)
2789 case ir.OCFUNC:
2790 n := n.(*ir.UnaryExpr)
2791 aux := n.X.(*ir.Name).Linksym()
2792 // OCFUNC is used to build function values, which must
2793 // always reference ABIInternal entry points.
2794 if aux.ABI() != obj.ABIInternal {
2795 s.Fatalf("expected ABIInternal: %v", aux.ABI())
2796 }
2797 return s.entryNewValue1A(ssa.OpAddr, n.Type(), aux, s.sb)
2798 case ir.ONAME:
2799 n := n.(*ir.Name)
2800 if n.Class == ir.PFUNC {
2801 // "value" of a function is the address of the function's closure
2802 sym := staticdata.FuncLinksym(n)
2803 return s.entryNewValue1A(ssa.OpAddr, types.NewPtr(n.Type()), sym, s.sb)
2804 }
2805 if s.canSSA(n) {
2806 return s.variable(n, n.Type())
2807 }
2808 return s.load(n.Type(), s.addr(n))
2809 case ir.OLINKSYMOFFSET:
2810 n := n.(*ir.LinksymOffsetExpr)
2811 return s.load(n.Type(), s.addr(n))
2812 case ir.ONIL:
2813 n := n.(*ir.NilExpr)
2814 t := n.Type()
2815 switch {
2816 case t.IsSlice():
2817 return s.constSlice(t)
2818 case t.IsInterface():
2819 return s.constInterface(t)
2820 default:
2821 return s.constNil(t)
2822 }
2823 case ir.OLITERAL:
2824 switch u := n.Val(); u.Kind() {
2825 case constant.Int:
2826 i := ir.IntVal(n.Type(), u)
2827 switch n.Type().Size() {
2828 case 1:
2829 return s.constInt8(n.Type(), int8(i))
2830 case 2:
2831 return s.constInt16(n.Type(), int16(i))
2832 case 4:
2833 return s.constInt32(n.Type(), int32(i))
2834 case 8:
2835 return s.constInt64(n.Type(), i)
2836 default:
2837 s.Fatalf("bad integer size %d", n.Type().Size())
2838 return nil
2839 }
2840 case constant.String:
2841 i := constant.StringVal(u)
2842 if i == "" {
2843 return s.constEmptyString(n.Type())
2844 }
2845 return s.entryNewValue0A(ssa.OpConstString, n.Type(), ssa.StringToAux(i))
2846 case constant.Bool:
2847 return s.constBool(constant.BoolVal(u))
2848 case constant.Float:
2849 f, _ := constant.Float64Val(u)
2850 switch n.Type().Size() {
2851 case 4:
2852 return s.constFloat32(n.Type(), f)
2853 case 8:
2854 return s.constFloat64(n.Type(), f)
2855 default:
2856 s.Fatalf("bad float size %d", n.Type().Size())
2857 return nil
2858 }
2859 case constant.Complex:
2860 re, _ := constant.Float64Val(constant.Real(u))
2861 im, _ := constant.Float64Val(constant.Imag(u))
2862 switch n.Type().Size() {
2863 case 8:
2864 pt := types.Types[types.TFLOAT32]
2865 return s.newValue2(ssa.OpComplexMake, n.Type(),
2866 s.constFloat32(pt, re),
2867 s.constFloat32(pt, im))
2868 case 16:
2869 pt := types.Types[types.TFLOAT64]
2870 return s.newValue2(ssa.OpComplexMake, n.Type(),
2871 s.constFloat64(pt, re),
2872 s.constFloat64(pt, im))
2873 default:
2874 s.Fatalf("bad complex size %d", n.Type().Size())
2875 return nil
2876 }
2877 default:
2878 s.Fatalf("unhandled OLITERAL %v", u.Kind())
2879 return nil
2880 }
2881 case ir.OCONVNOP:
2882 n := n.(*ir.ConvExpr)
2883 to := n.Type()
2884 from := n.X.Type()
2885
2886 // Assume everything will work out, so set up our return value.
2887 // Anything interesting that happens from here is a fatal.
2888 x := s.expr(n.X)
2889 if to == from {
2890 return x
2891 }
2892
2893 // Special case for not confusing GC and liveness.
2894 // We don't want pointers accidentally classified
2895 // as not-pointers or vice-versa because of copy
2896 // elision.
2897 if to.IsPtrShaped() != from.IsPtrShaped() {
2898 return s.newValue2(ssa.OpConvert, to, x, s.mem())
2899 }
2900
2901 v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type
2902
2903 // CONVNOP closure
2904 if to.Kind() == types.TFUNC && from.IsPtrShaped() {
2905 return v
2906 }
2907
2908 // named <--> unnamed type or typed <--> untyped const
2909 if from.Kind() == to.Kind() {
2910 return v
2911 }
2912
2913 // unsafe.Pointer <--> *T
2914 if to.IsUnsafePtr() && from.IsPtrShaped() || from.IsUnsafePtr() && to.IsPtrShaped() {
2915 if s.checkPtrEnabled && checkPtrOK && to.IsPtr() && from.IsUnsafePtr() {
2916 s.checkPtrAlignment(n, v, nil)
2917 }
2918 return v
2919 }
2920
2921 // map <--> *hmap
2922 if to.Kind() == types.TMAP && from == types.NewPtr(reflectdata.MapType()) {
2923 return v
2924 }
2925
2926 types.CalcSize(from)
2927 types.CalcSize(to)
2928 if from.Size() != to.Size() {
2929 s.Fatalf("CONVNOP width mismatch %v (%d) -> %v (%d)\n", from, from.Size(), to, to.Size())
2930 return nil
2931 }
2932 if etypesign(from.Kind()) != etypesign(to.Kind()) {
2933 s.Fatalf("CONVNOP sign mismatch %v (%s) -> %v (%s)\n", from, from.Kind(), to, to.Kind())
2934 return nil
2935 }
2936
2937 if base.Flag.Cfg.Instrumenting {
2938 // These appear to be fine, but they fail the
2939 // integer constraint below, so okay them here.
2940 // Sample non-integer conversion: map[string]string -> *uint8
2941 return v
2942 }
2943
2944 if etypesign(from.Kind()) == 0 {
2945 s.Fatalf("CONVNOP unrecognized non-integer %v -> %v\n", from, to)
2946 return nil
2947 }
2948
2949 // integer, same width, same sign
2950 return v
2951
2952 case ir.OCONV:
2953 n := n.(*ir.ConvExpr)
2954 x := s.expr(n.X)
2955 return s.conv(n, x, n.X.Type(), n.Type())
2956
2957 case ir.ODOTTYPE:
2958 n := n.(*ir.TypeAssertExpr)
2959 res, _ := s.dottype(n, false)
2960 return res
2961
2962 case ir.ODYNAMICDOTTYPE:
2963 n := n.(*ir.DynamicTypeAssertExpr)
2964 res, _ := s.dynamicDottype(n, false)
2965 return res
2966
2967 // binary ops
2968 case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
2969 n := n.(*ir.BinaryExpr)
2970 a := s.expr(n.X)
2971 b := s.expr(n.Y)
2972 if n.X.Type().IsComplex() {
2973 pt := types.FloatForComplex(n.X.Type())
2974 op := s.ssaOp(ir.OEQ, pt)
2975 r := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b))
2976 i := s.newValueOrSfCall2(op, types.Types[types.TBOOL], s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b))
2977 c := s.newValue2(ssa.OpAndB, types.Types[types.TBOOL], r, i)
2978 switch n.Op() {
2979 case ir.OEQ:
2980 return c
2981 case ir.ONE:
2982 return s.newValue1(ssa.OpNot, types.Types[types.TBOOL], c)
2983 default:
2984 s.Fatalf("ordered complex compare %v", n.Op())
2985 }
2986 }
2987
2988 // Convert OGE and OGT into OLE and OLT.
2989 op := n.Op()
2990 switch op {
2991 case ir.OGE:
2992 op, a, b = ir.OLE, b, a
2993 case ir.OGT:
2994 op, a, b = ir.OLT, b, a
2995 }
2996 if n.X.Type().IsFloat() {
2997 // float comparison
2998 return s.newValueOrSfCall2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
2999 }
3000 // integer comparison
3001 return s.newValue2(s.ssaOp(op, n.X.Type()), types.Types[types.TBOOL], a, b)
3002 case ir.OMUL:
3003 n := n.(*ir.BinaryExpr)
3004 a := s.expr(n.X)
3005 b := s.expr(n.Y)
3006 if n.Type().IsComplex() {
3007 mulop := ssa.OpMul64F
3008 addop := ssa.OpAdd64F
3009 subop := ssa.OpSub64F
3010 pt := types.FloatForComplex(n.Type())
3011 wt := types.Types[types.TFLOAT64]
3012
3013 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3014 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3015 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3016 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3017
3018 if pt != wt {
3019 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3020 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3021 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3022 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3023 }
3024
3025 xreal := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3026 ximag := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, bimag), s.newValueOrSfCall2(mulop, wt, aimag, breal))
3027
3028 if pt != wt {
3029 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3030 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3031 }
3032
3033 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3034 }
3035
3036 if n.Type().IsFloat() {
3037 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3038 }
3039
3040 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3041
3042 case ir.ODIV:
3043 n := n.(*ir.BinaryExpr)
3044 a := s.expr(n.X)
3045 b := s.expr(n.Y)
3046 if n.Type().IsComplex() {
3047 // TODO this is not executed because the front-end substitutes a runtime call.
3048 // That probably ought to change; with modest optimization the widen/narrow
3049 // conversions could all be elided in larger expression trees.
3050 mulop := ssa.OpMul64F
3051 addop := ssa.OpAdd64F
3052 subop := ssa.OpSub64F
3053 divop := ssa.OpDiv64F
3054 pt := types.FloatForComplex(n.Type())
3055 wt := types.Types[types.TFLOAT64]
3056
3057 areal := s.newValue1(ssa.OpComplexReal, pt, a)
3058 breal := s.newValue1(ssa.OpComplexReal, pt, b)
3059 aimag := s.newValue1(ssa.OpComplexImag, pt, a)
3060 bimag := s.newValue1(ssa.OpComplexImag, pt, b)
3061
3062 if pt != wt {
3063 areal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, areal)
3064 breal = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, breal)
3065 aimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, aimag)
3066 bimag = s.newValueOrSfCall1(ssa.OpCvt32Fto64F, wt, bimag)
3067 }
3068
3069 denom := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, breal, breal), s.newValueOrSfCall2(mulop, wt, bimag, bimag))
3070 xreal := s.newValueOrSfCall2(addop, wt, s.newValueOrSfCall2(mulop, wt, areal, breal), s.newValueOrSfCall2(mulop, wt, aimag, bimag))
3071 ximag := s.newValueOrSfCall2(subop, wt, s.newValueOrSfCall2(mulop, wt, aimag, breal), s.newValueOrSfCall2(mulop, wt, areal, bimag))
3072
3073 // TODO not sure if this is best done in wide precision or narrow
3074 // Double-rounding might be an issue.
3075 // Note that the pre-SSA implementation does the entire calculation
3076 // in wide precision, so it at least doesn't have a double rounding problem.
3077 xreal = s.newValueOrSfCall2(divop, wt, xreal, denom)
3078 ximag = s.newValueOrSfCall2(divop, wt, ximag, denom)
3079
3080 if pt != wt {
3081 xreal = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, xreal)
3082 ximag = s.newValueOrSfCall1(ssa.OpCvt64Fto32F, pt, ximag)
3083 }
3084 return s.newValue2(ssa.OpComplexMake, n.Type(), xreal, ximag)
3085 }
3086 if n.Type().IsFloat() {
3087 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3088 }
3089 return s.intDivide(n, a, b)
3090 case ir.OMOD:
3091 n := n.(*ir.BinaryExpr)
3092 a := s.expr(n.X)
3093 b := s.expr(n.Y)
3094 return s.intDivide(n, a, b)
3095 case ir.OADD, ir.OSUB:
3096 n := n.(*ir.BinaryExpr)
3097 a := s.expr(n.X)
3098 b := s.expr(n.Y)
3099 if n.Type().IsComplex() {
3100 pt := types.FloatForComplex(n.Type())
3101 op := s.ssaOp(n.Op(), pt)
3102 return s.newValue2(ssa.OpComplexMake, n.Type(),
3103 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexReal, pt, a), s.newValue1(ssa.OpComplexReal, pt, b)),
3104 s.newValueOrSfCall2(op, pt, s.newValue1(ssa.OpComplexImag, pt, a), s.newValue1(ssa.OpComplexImag, pt, b)))
3105 }
3106 if n.Type().IsFloat() {
3107 return s.newValueOrSfCall2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3108 }
3109 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3110 case ir.OAND, ir.OOR, ir.OXOR:
3111 n := n.(*ir.BinaryExpr)
3112 a := s.expr(n.X)
3113 b := s.expr(n.Y)
3114 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
3115 case ir.OANDNOT:
3116 n := n.(*ir.BinaryExpr)
3117 a := s.expr(n.X)
3118 b := s.expr(n.Y)
3119 b = s.newValue1(s.ssaOp(ir.OBITNOT, b.Type), b.Type, b)
3120 return s.newValue2(s.ssaOp(ir.OAND, n.Type()), a.Type, a, b)
3121 case ir.OLSH, ir.ORSH:
3122 n := n.(*ir.BinaryExpr)
3123 a := s.expr(n.X)
3124 b := s.expr(n.Y)
3125 bt := b.Type
3126 if bt.IsSigned() {
3127 cmp := s.newValue2(s.ssaOp(ir.OLE, bt), types.Types[types.TBOOL], s.zeroVal(bt), b)
3128 s.check(cmp, ir.Syms.Panicshift)
3129 bt = bt.ToUnsigned()
3130 }
3131 return s.newValue2(s.ssaShiftOp(n.Op(), n.Type(), bt), a.Type, a, b)
3132 case ir.OANDAND, ir.OOROR:
3133 // To implement OANDAND (and OOROR), we introduce a
3134 // new temporary variable to hold the result. The
3135 // variable is associated with the OANDAND node in the
3136 // s.vars table (normally variables are only
3137 // associated with ONAME nodes). We convert
3138 //     A && B
3139 // to
3140 //     var = A
3141 //     if var {
3142 //         var = B
3143 //     }
3144 // Using var in the subsequent block introduces the
3145 // necessary phi variable.
3146 n := n.(*ir.LogicalExpr)
3147 el := s.expr(n.X)
3148 s.vars[n] = el
3149
3150 b := s.endBlock()
3151 b.Kind = ssa.BlockIf
3152 b.SetControl(el)
3153 // In theory, we should set b.Likely here based on context.
3154 // However, gc only gives us likeliness hints
3155 // in a single place, for plain OIF statements,
3156 // and passing around context is finicky, so don't bother for now.
3157
3158 bRight := s.f.NewBlock(ssa.BlockPlain)
3159 bResult := s.f.NewBlock(ssa.BlockPlain)
3160 if n.Op() == ir.OANDAND {
3161 b.AddEdgeTo(bRight)
3162 b.AddEdgeTo(bResult)
3163 } else if n.Op() == ir.OOROR {
3164 b.AddEdgeTo(bResult)
3165 b.AddEdgeTo(bRight)
3166 }
3167
3168 s.startBlock(bRight)
3169 er := s.expr(n.Y)
3170 s.vars[n] = er
3171
3172 b = s.endBlock()
3173 b.AddEdgeTo(bResult)
3174
3175 s.startBlock(bResult)
3176 return s.variable(n, types.Types[types.TBOOL])
3177 case ir.OCOMPLEX:
3178 n := n.(*ir.BinaryExpr)
3179 r := s.expr(n.X)
3180 i := s.expr(n.Y)
3181 return s.newValue2(ssa.OpComplexMake, n.Type(), r, i)
3182
3183 // unary ops
3184 case ir.ONEG:
3185 n := n.(*ir.UnaryExpr)
3186 a := s.expr(n.X)
3187 if n.Type().IsComplex() {
3188 tp := types.FloatForComplex(n.Type())
3189 negop := s.ssaOp(n.Op(), tp)
3190 return s.newValue2(ssa.OpComplexMake, n.Type(),
3191 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexReal, tp, a)),
3192 s.newValue1(negop, tp, s.newValue1(ssa.OpComplexImag, tp, a)))
3193 }
3194 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3195 case ir.ONOT, ir.OBITNOT:
3196 n := n.(*ir.UnaryExpr)
3197 a := s.expr(n.X)
3198 return s.newValue1(s.ssaOp(n.Op(), n.Type()), a.Type, a)
3199 case ir.OIMAG, ir.OREAL:
3200 n := n.(*ir.UnaryExpr)
3201 a := s.expr(n.X)
3202 return s.newValue1(s.ssaOp(n.Op(), n.X.Type()), n.Type(), a)
3203 case ir.OPLUS:
3204 n := n.(*ir.UnaryExpr)
3205 return s.expr(n.X)
3206
3207 case ir.OADDR:
3208 n := n.(*ir.AddrExpr)
3209 return s.addr(n.X)
3210
3211 case ir.ORESULT:
3212 n := n.(*ir.ResultExpr)
3213 if s.prevCall == nil || s.prevCall.Op != ssa.OpStaticLECall && s.prevCall.Op != ssa.OpInterLECall && s.prevCall.Op != ssa.OpClosureLECall {
3214 panic("Expected to see a previous call")
3215 }
3216 which := n.Index
3217 if which == -1 {
3218 panic(fmt.Errorf("ORESULT %v does not match call %s", n, s.prevCall))
3219 }
3220 return s.resultOfCall(s.prevCall, which, n.Type())
3221
3222 case ir.ODEREF:
3223 n := n.(*ir.StarExpr)
3224 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3225 return s.load(n.Type(), p)
3226
3227 case ir.ODOT:
3228 n := n.(*ir.SelectorExpr)
3229 if n.X.Op() == ir.OSTRUCTLIT {
3230 // All literals with nonzero fields have already been
3231 // rewritten during walk. Any that remain are just T{}
3232 // or equivalents. Use the zero value.
3233 if !ir.IsZero(n.X) {
3234 s.Fatalf("literal with nonzero value in SSA: %v", n.X)
3235 }
3236 return s.zeroVal(n.Type())
3237 }
3238 // If n is addressable and can't be represented in
3239 // SSA, then load just the selected field. This
3240 // prevents false memory dependencies in race/msan
3241 // instrumentation.
3242 if ir.IsAddressable(n) && !s.canSSA(n) {
3243 p := s.addr(n)
3244 return s.load(n.Type(), p)
3245 }
3246 v := s.expr(n.X)
3247 return s.newValue1I(ssa.OpStructSelect, n.Type(), int64(fieldIdx(n)), v)
3248
3249 case ir.ODOTPTR:
3250 n := n.(*ir.SelectorExpr)
3251 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
3252 p = s.newValue1I(ssa.OpOffPtr, types.NewPtr(n.Type()), n.Offset(), p)
3253 return s.load(n.Type(), p)
3254
3255 case ir.OINDEX:
3256 n := n.(*ir.IndexExpr)
3257 switch {
3258 case n.X.Type().IsString():
3259 if n.Bounded() && ir.IsConst(n.X, constant.String) && ir.IsConst(n.Index, constant.Int) {
3260 // Replace "abc"[1] with 'b'.
3261 // Delayed until now because "abc"[1] is not an ideal constant.
3262 // See test/fixedbugs/issue11370.go.
3263 return s.newValue0I(ssa.OpConst8, types.Types[types.TUINT8], int64(int8(ir.StringVal(n.X)[ir.Int64Val(n.Index)])))
3264 }
3265 a := s.expr(n.X)
3266 i := s.expr(n.Index)
3267 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], a)
3268 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3269 ptrtyp := s.f.Config.Types.BytePtr
3270 ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
3271 if ir.IsConst(n.Index, constant.Int) {
3272 ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, ir.Int64Val(n.Index), ptr)
3273 } else {
3274 ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
3275 }
3276 return s.load(types.Types[types.TUINT8], ptr)
3277 case n.X.Type().IsSlice():
3278 p := s.addr(n)
3279 return s.load(n.X.Type().Elem(), p)
3280 case n.X.Type().IsArray():
3281 if ssa.CanSSA(n.X.Type()) {
3282 // SSA can handle arrays of length at most 1.
3283 bound := n.X.Type().NumElem()
3284 a := s.expr(n.X)
3285 i := s.expr(n.Index)
3286 if bound == 0 {
3287 // Bounds check will never succeed. Might as well
3288 // use constants for the bounds check.
3289 z := s.constInt(types.Types[types.TINT], 0)
3290 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3291 // The return value won't be live, return junk.
3292 // But not quite junk, in case bounds checks are turned off. See issue 48092.
3293 return s.zeroVal(n.Type())
3294 }
3295 len := s.constInt(types.Types[types.TINT], bound)
3296 s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
3297 return s.newValue1I(ssa.OpArraySelect, n.Type(), 0, a)
3298 }
3299 p := s.addr(n)
3300 return s.load(n.X.Type().Elem(), p)
3301 default:
3302 s.Fatalf("bad type for index %v", n.X.Type())
3303 return nil
3304 }
3305
3306 case ir.OLEN, ir.OCAP:
3307 n := n.(*ir.UnaryExpr)
3308 switch {
3309 case n.X.Type().IsSlice():
3310 op := ssa.OpSliceLen
3311 if n.Op() == ir.OCAP {
3312 op = ssa.OpSliceCap
3313 }
3314 return s.newValue1(op, types.Types[types.TINT], s.expr(n.X))
3315 case n.X.Type().IsString():
3316 return s.newValue1(ssa.OpStringLen, types.Types[types.TINT], s.expr(n.X))
3317 case n.X.Type().IsMap(), n.X.Type().IsChan():
3318 return s.referenceTypeBuiltin(n, s.expr(n.X))
3319 default:
3320 return s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
3321 }
3322
3323 case ir.OSPTR:
3324 n := n.(*ir.UnaryExpr)
3325 a := s.expr(n.X)
3326 if n.X.Type().IsSlice() {
3327 if n.Bounded() {
3328 return s.newValue1(ssa.OpSlicePtr, n.Type(), a)
3329 }
3330 return s.newValue1(ssa.OpSlicePtrUnchecked, n.Type(), a)
3331 } else {
3332 return s.newValue1(ssa.OpStringPtr, n.Type(), a)
3333 }
3334
3335 case ir.OITAB:
3336 n := n.(*ir.UnaryExpr)
3337 a := s.expr(n.X)
3338 return s.newValue1(ssa.OpITab, n.Type(), a)
3339
3340 case ir.OIDATA:
3341 n := n.(*ir.UnaryExpr)
3342 a := s.expr(n.X)
3343 return s.newValue1(ssa.OpIData, n.Type(), a)
3344
3345 case ir.OMAKEFACE:
3346 n := n.(*ir.BinaryExpr)
3347 tab := s.expr(n.X)
3348 data := s.expr(n.Y)
3349 return s.newValue2(ssa.OpIMake, n.Type(), tab, data)
3350
3351 case ir.OSLICEHEADER:
3352 n := n.(*ir.SliceHeaderExpr)
3353 p := s.expr(n.Ptr)
3354 l := s.expr(n.Len)
3355 c := s.expr(n.Cap)
3356 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3357
3358 case ir.OSTRINGHEADER:
3359 n := n.(*ir.StringHeaderExpr)
3360 p := s.expr(n.Ptr)
3361 l := s.expr(n.Len)
3362 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3363
3364 case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR:
3365 n := n.(*ir.SliceExpr)
3366 check := s.checkPtrEnabled && n.Op() == ir.OSLICE3ARR && n.X.Op() == ir.OCONVNOP && n.X.(*ir.ConvExpr).X.Type().IsUnsafePtr()
3367 v := s.exprCheckPtr(n.X, !check)
3368 var i, j, k *ssa.Value
3369 if n.Low != nil {
3370 i = s.expr(n.Low)
3371 }
3372 if n.High != nil {
3373 j = s.expr(n.High)
3374 }
3375 if n.Max != nil {
3376 k = s.expr(n.Max)
3377 }
3378 p, l, c := s.slice(v, i, j, k, n.Bounded())
3379 if check {
3380 // Emit checkptr instrumentation after bound check to prevent false positive, see #46938.
3381 s.checkPtrAlignment(n.X.(*ir.ConvExpr), v, s.conv(n.Max, k, k.Type, types.Types[types.TUINTPTR]))
3382 }
3383 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3384
3385 case ir.OSLICESTR:
3386 n := n.(*ir.SliceExpr)
3387 v := s.expr(n.X)
3388 var i, j *ssa.Value
3389 if n.Low != nil {
3390 i = s.expr(n.Low)
3391 }
3392 if n.High != nil {
3393 j = s.expr(n.High)
3394 }
3395 p, l, _ := s.slice(v, i, j, nil, n.Bounded())
3396 return s.newValue2(ssa.OpStringMake, n.Type(), p, l)
3397
3398 case ir.OSLICE2ARRPTR:
3399 // if arrlen > slice.len {
3400 //   panic(...)
3401 // }
3402 // slice.ptr
3403 n := n.(*ir.ConvExpr)
3404 v := s.expr(n.X)
3405 nelem := n.Type().Elem().NumElem()
3406 arrlen := s.constInt(types.Types[types.TINT], nelem)
3407 cap := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
3408 s.boundsCheck(arrlen, cap, ssa.BoundsConvert, false)
3409 op := ssa.OpSlicePtr
3410 if nelem == 0 {
3411 op = ssa.OpSlicePtrUnchecked
3412 }
3413 return s.newValue1(op, n.Type(), v)
3414
3415 case ir.OCALLFUNC:
3416 n := n.(*ir.CallExpr)
3417 if ir.IsIntrinsicCall(n) {
3418 return s.intrinsicCall(n)
3419 }
3420 fallthrough
3421
3422 case ir.OCALLINTER:
3423 n := n.(*ir.CallExpr)
3424 return s.callResult(n, callNormal)
3425
3426 case ir.OGETG:
3427 n := n.(*ir.CallExpr)
3428 return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
3429
3430 case ir.OGETCALLERPC:
3431 n := n.(*ir.CallExpr)
3432 return s.newValue0(ssa.OpGetCallerPC, n.Type())
3433
3434 case ir.OGETCALLERSP:
3435 n := n.(*ir.CallExpr)
3436 return s.newValue1(ssa.OpGetCallerSP, n.Type(), s.mem())
3437
3438 case ir.OAPPEND:
3439 return s.append(n.(*ir.CallExpr), false)
3440
3441 case ir.OMIN, ir.OMAX:
3442 return s.minMax(n.(*ir.CallExpr))
3443
3444 case ir.OSTRUCTLIT, ir.OARRAYLIT:
3445 // All literals with nonzero fields have already been
3446 // rewritten during walk. Any that remain are just T{}
3447 // or equivalents. Use the zero value.
3448 n := n.(*ir.CompLitExpr)
3449 if !ir.IsZero(n) {
3450 s.Fatalf("literal with nonzero value in SSA: %v", n)
3451 }
3452 return s.zeroVal(n.Type())
3453
3454 case ir.ONEW:
3455 n := n.(*ir.UnaryExpr)
3456 var rtype *ssa.Value
3457 if x, ok := n.X.(*ir.DynamicType); ok && x.Op() == ir.ODYNAMICTYPE {
3458 rtype = s.expr(x.RType)
3459 }
3460 return s.newObject(n.Type().Elem(), rtype)
3461
3462 case ir.OUNSAFEADD:
3463 n := n.(*ir.BinaryExpr)
3464 ptr := s.expr(n.X)
3465 len := s.expr(n.Y)
3466
3467 // Force len to uintptr so the pointer arithmetic below is performed at
3468 // full pointer width, with no garbage bits in the offset operand.
3469 len = s.conv(n, len, len.Type, types.Types[types.TUINTPTR])
3470
3471 return s.newValue2(ssa.OpAddPtr, n.Type(), ptr, len)
3472
3473 default:
3474 s.Fatalf("unhandled expr %v", n.Op())
3475 return nil
3476 }
3477 }
3478
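// resultOfCall returns result number which (of type t) of the call c.
// A result that has no register assignment and is not SSA-able is loaded
// from memory via SelectNAddr; otherwise it is projected with SelectN.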
3479 func (s *state) resultOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3480 aux := c.Aux.(*ssa.AuxCall)
3481 pa := aux.ParamAssignmentForResult(which)
3482 // TODO(register args) determine if in-memory TypeOK is better loaded early from SelectNAddr or later when SelectN is expanded.
3483 // SelectN is better for pattern-matching and possible call-aware analysis we might want to do in the future.
3484 if len(pa.Registers) == 0 && !ssa.CanSSA(t) {
3485 addr := s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3486 return s.rawLoad(t, addr)
3487 }
3488 return s.newValue1I(ssa.OpSelectN, t, which, c)
3489 }
3490
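// resultAddrOfCall returns the address of result number which of the call c.
// Register-assigned results are first spilled to a temporary so that an
// address exists to return.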
3491 func (s *state) resultAddrOfCall(c *ssa.Value, which int64, t *types.Type) *ssa.Value {
3492 aux := c.Aux.(*ssa.AuxCall)
3493 pa := aux.ParamAssignmentForResult(which)
3494 if len(pa.Registers) == 0 {
3495 return s.newValue1I(ssa.OpSelectNAddr, types.NewPtr(t), which, c)
3496 }
3497 _, addr := s.temp(c.Pos, t)
3498 rval := s.newValue1I(ssa.OpSelectN, t, which, c)
3499 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, addr, rval, s.mem(), false)
3500 return addr
3501 }
3502
3503 // append converts an OAPPEND node to SSA.
3504 // If inplace is false, it converts the OAPPEND expression n to an ssa.Value,
3505 // adds it to s, and returns the Value.
3506 // If inplace is true, it writes the result of the OAPPEND expression n
3507 // back to the slice being appended to, and returns nil.
3508 // inplace MUST be set to false if the slice can be SSA'd.
3509 // Note: this code only handles fixed-count appends. Dotdotdot appends
3510 // were already rewritten at this point (by walk).
3511 func (s *state) append(n *ir.CallExpr, inplace bool) *ssa.Value {
3512 // If inplace is false, process as expression "append(s, e1, e2, e3)":
3513 //
3514 // ptr, len, cap := s
3515 // len += 3
3516 // if uint(cap) < uint(len) {
3517 //     ptr, len, cap = growslice(ptr, len, cap, 3, typ)
3518 //     Note that len is unmodified by growslice.
3519 // }
3520 // // with write barriers, if needed:
3521 // *(ptr+(len-3)) = e1
3522 // *(ptr+(len-2)) = e2
3523 // *(ptr+(len-1)) = e3
3524 // return makeslice(ptr, len, cap)
3525 //
3526 //
3527 // If inplace is true, process as statement "s = append(s, e1, e2, e3)":
3528 //
3529 // a := &s
3530 // ptr, len, cap := s
3531 // len += 3
3532 // if uint(cap) < uint(len) {
3533 //     ptr, len, cap = growslice(ptr, len, cap, 3, typ)
3534 //     vardef(a)    // if necessary, advise liveness we are writing a new a
3535 //     *a.cap = cap // write before ptr to avoid a spill
3536 //     *a.ptr = ptr // with write barrier
3537 // }
3538 // *a.len = len
3539 // // with write barriers, if needed:
3540 // *(ptr+(len-3)) = e1
3541 // *(ptr+(len-2)) = e2
3542 // *(ptr+(len-1)) = e3
3543
3544 et := n.Type().Elem()
3545 pt := types.NewPtr(et)
3546
3547
3548 sn := n.Args[0]
3549 var slice, addr *ssa.Value
3550 if inplace {
3551 addr = s.addr(sn)
3552 slice = s.load(n.Type(), addr)
3553 } else {
3554 slice = s.expr(sn)
3555 }
3556
3557 // Set up blocks for the grow and assign paths.
3558 grow := s.f.NewBlock(ssa.BlockPlain)
3559 assign := s.f.NewBlock(ssa.BlockPlain)
3560
3561 // Decompose the input slice into ptr/len/cap.
3562 p := s.newValue1(ssa.OpSlicePtr, pt, slice)
3563 l := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], slice)
3564 c := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], slice)
3565
3566 // Add number of new elements to length.
3567 nargs := s.constInt(types.Types[types.TINT], int64(len(n.Args)-1))
3568 l = s.newValue2(s.ssaOp(ir.OADD, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
3569
3570 // Decide if we need to grow: cap < newlen, compared unsigned.
3571 cmp := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT]), types.Types[types.TBOOL], c, l)
3572
3573 // Record the values of ptr/len/cap before the branch.
3574 s.vars[ptrVar] = p
3575 s.vars[lenVar] = l
3576 if !inplace {
3577 s.vars[capVar] = c
3578 }
3579
3580 b := s.endBlock()
3581 b.Kind = ssa.BlockIf
3582 b.Likely = ssa.BranchUnlikely
3583 b.SetControl(cmp)
3584 b.AddEdgeTo(grow)
3585 b.AddEdgeTo(assign)
3586
3587 // Call growslice
3588 s.startBlock(grow)
3589 taddr := s.expr(n.Fun)
3590 r := s.rtcall(ir.Syms.Growslice, true, []*types.Type{n.Type()}, p, l, c, nargs, taddr)
3591
3592 // Decompose output slice
3593 p = s.newValue1(ssa.OpSlicePtr, pt, r[0])
3594 l = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], r[0])
3595 c = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], r[0])
3596
3597 s.vars[ptrVar] = p
3598 s.vars[lenVar] = l
3599 s.vars[capVar] = c
3600 if inplace {
3601 if sn.Op() == ir.ONAME {
3602 sn := sn.(*ir.Name)
3603 if sn.Class != ir.PEXTERN {
3604 // Tell liveness we're about to build a new slice
3605 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, sn, s.mem())
3606 }
3607 }
3608 capaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceCapOffset, addr)
3609 s.store(types.Types[types.TINT], capaddr, c)
3610 s.store(pt, addr, p)
3611 }
3612
3613 b = s.endBlock()
3614 b.AddEdgeTo(assign)
3615
3616 // assign new elements to slots
3617 s.startBlock(assign)
3618 p = s.variable(ptrVar, pt)
3619 l = s.variable(lenVar, types.Types[types.TINT])
3620 if !inplace {
3621 c = s.variable(capVar, types.Types[types.TINT])
3622 }
3623
3624 if inplace {
3625 // Update length in place.
3626 // We have to wait until here to make sure growslice succeeded.
3627 lenaddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, types.SliceLenOffset, addr)
3628 s.store(types.Types[types.TINT], lenaddr, l)
3629 }
3630
3631 // Evaluate args
3632 type argRec struct {
3633 // if store is true, we're appending the value v.  If false, we're appending the
3634 // value at *v.
3635 v *ssa.Value
3636 store bool
3637 }
3638 args := make([]argRec, 0, len(n.Args[1:]))
3639 for _, n := range n.Args[1:] {
3640 if ssa.CanSSA(n.Type()) {
3641 args = append(args, argRec{v: s.expr(n), store: true})
3642 } else {
3643 v := s.addr(n)
3644 args = append(args, argRec{v: v})
3645 }
3646 }
3647
3648 // Write args into slice.
3649 oldLen := s.newValue2(s.ssaOp(ir.OSUB, types.Types[types.TINT]), types.Types[types.TINT], l, nargs)
3650 p2 := s.newValue2(ssa.OpPtrIndex, pt, p, oldLen)
3651 for i, arg := range args {
3652 addr := s.newValue2(ssa.OpPtrIndex, pt, p2, s.constInt(types.Types[types.TINT], int64(i)))
3653 if arg.store {
3654 s.storeType(et, addr, arg.v, 0, true)
3655 } else {
3656 s.move(et, addr, arg.v)
3657 }
3658 }
3659
3660 // Forget the ptr/len/cap variables; they are only used to thread values
3661 // through the grow/assign control flow above and must not leak into the
3662 // surrounding function's variable state.
3663
3664 delete(s.vars, ptrVar)
3665 delete(s.vars, lenVar)
3666 if !inplace {
3667 delete(s.vars, capVar)
3668 }
3669
3670 // make result
3671 if inplace {
3672 return nil
3673 }
3674 return s.newValue3(ssa.OpSliceMake, n.Type(), p, l, c)
3675 }
3676
3677 // minMax converts an OMIN/OMAX builtin call into SSA.
3678 func (s *state) minMax(n *ir.CallExpr) *ssa.Value {
3679
3680 // fold applies op pairwise across all of the call's arguments,
3681 // reducing the variadic min/max to a chain of binary operations.
3682 fold := func(op func(x, a *ssa.Value) *ssa.Value) *ssa.Value {
3683 x := s.expr(n.Args[0])
3684 for _, arg := range n.Args[1:] {
3685 x = op(x, s.expr(arg))
3686 }
3687 return x
3688 }
3689
3690 typ := n.Type()
3691
3692 if typ.IsFloat() || typ.IsString() {
3693 // min/max semantics for floats are tricky because of NaNs and
3694 // negative zero. Some architectures have instructions which
3695 // we can use to generate the right result. For others we must
3696 // call into the runtime instead.
3697 //
3698 // Strings are conceptually simpler, but we currently desugar
3699 // string comparisons during walk, not ssagen.
3700
3701 if typ.IsFloat() {
3702 switch Arch.LinkArch.Family {
3703 case sys.AMD64, sys.ARM64:
3704 var op ssa.Op
3705 switch {
3706 case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMIN:
3707 op = ssa.OpMin64F
3708 case typ.Kind() == types.TFLOAT64 && n.Op() == ir.OMAX:
3709 op = ssa.OpMax64F
3710 case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMIN:
3711 op = ssa.OpMin32F
3712 case typ.Kind() == types.TFLOAT32 && n.Op() == ir.OMAX:
3713 op = ssa.OpMax32F
3714 }
3715 return fold(func(x, a *ssa.Value) *ssa.Value {
3716 return s.newValue2(op, typ, x, a)
3717 })
3718 }
3719 }
3720 var name string
3721 switch typ.Kind() {
3722 case types.TFLOAT32:
3723 switch n.Op() {
3724 case ir.OMIN:
3725 name = "fmin32"
3726 case ir.OMAX:
3727 name = "fmax32"
3728 }
3729 case types.TFLOAT64:
3730 switch n.Op() {
3731 case ir.OMIN:
3732 name = "fmin64"
3733 case ir.OMAX:
3734 name = "fmax64"
3735 }
3736 case types.TSTRING:
3737 switch n.Op() {
3738 case ir.OMIN:
3739 name = "strmin"
3740 case ir.OMAX:
3741 name = "strmax"
3742 }
3743 }
3744 fn := typecheck.LookupRuntimeFunc(name)
3745
3746 return fold(func(x, a *ssa.Value) *ssa.Value {
3747 return s.rtcall(fn, true, []*types.Type{typ}, x, a)[0]
3748 })
3749 }
3750
3751 lt := s.ssaOp(ir.OLT, typ)
3752
3753 return fold(func(x, a *ssa.Value) *ssa.Value {
3754 switch n.Op() {
3755 case ir.OMIN:
3756 // a < x ? a : x
3757 return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], a, x), a, x)
3758 case ir.OMAX:
3759 // x < a ? a : x
3760 return s.ternary(s.newValue2(lt, types.Types[types.TBOOL], x, a), a, x)
3761 }
3762 panic("unreachable")
3763 })
3764 }
3765
3766 // ternary emits code to evaluate cond ? x : y.
3767 func (s *state) ternary(cond, x, y *ssa.Value) *ssa.Value {
3768 // Note that we need a new ternaryVar each time (unlike okVar where we can
3769 // reuse the variable) because it might have a different type every time.
3770 ternaryVar := ssaMarker("ternary")
3771
3772 bThen := s.f.NewBlock(ssa.BlockPlain)
3773 bElse := s.f.NewBlock(ssa.BlockPlain)
3774 bEnd := s.f.NewBlock(ssa.BlockPlain)
3775
3776 b := s.endBlock()
3777 b.Kind = ssa.BlockIf
3778 b.SetControl(cond)
3779 b.AddEdgeTo(bThen)
3780 b.AddEdgeTo(bElse)
3781
3782 s.startBlock(bThen)
3783 s.vars[ternaryVar] = x
3784 s.endBlock().AddEdgeTo(bEnd)
3785
3786 s.startBlock(bElse)
3787 s.vars[ternaryVar] = y
3788 s.endBlock().AddEdgeTo(bEnd)
3789
3790 s.startBlock(bEnd)
3791 r := s.variable(ternaryVar, x.Type)
3792 delete(s.vars, ternaryVar)
3793 return r
3794 }
3795
3796 // condBranch evaluates the boolean expression cond and branches to yes
3797 // if cond is true and no if cond is false.
3798 // This function is intended to handle && and || better than just calling
3799 // s.expr(cond) and branching on the result.
3800 func (s *state) condBranch(cond ir.Node, yes, no *ssa.Block, likely int8) {
3801 switch cond.Op() {
3802 case ir.OANDAND:
3803 cond := cond.(*ir.LogicalExpr)
3804 mid := s.f.NewBlock(ssa.BlockPlain)
3805 s.stmtList(cond.Init())
3806 s.condBranch(cond.X, mid, no, max8(likely, 0))
3807 s.startBlock(mid)
3808 s.condBranch(cond.Y, yes, no, likely)
3809 return
3810 // Note: if likely==1, then both recursive calls pass 1.
3811 // If likely==-1, then we don't have enough information to decide
3812 // whether the first branch is likely or not. So we pass 0 for
3813 // the likeliness of the first branch.
3814 // TODO: have the frontend give us branch prediction hints for
3815 // && and || individually (handling the short circuit appropriately).
3816 case ir.OOROR:
3817 cond := cond.(*ir.LogicalExpr)
3818 mid := s.f.NewBlock(ssa.BlockPlain)
3819 s.stmtList(cond.Init())
3820 s.condBranch(cond.X, yes, mid, min8(likely, 0))
3821 s.startBlock(mid)
3822 s.condBranch(cond.Y, yes, no, likely)
3823 return
3824 // Note: if likely==-1, then both recursive calls pass -1.
3825 // If likely==1, then we don't have enough info to decide
3826 // the likelihood of the first branch.
3827 case ir.ONOT:
3828 cond := cond.(*ir.UnaryExpr)
3829 s.stmtList(cond.Init())
3830 s.condBranch(cond.X, no, yes, -likely)
3831 return
3832 case ir.OCONVNOP:
3833 cond := cond.(*ir.ConvExpr)
3834 s.stmtList(cond.Init())
3835 s.condBranch(cond.X, yes, no, likely)
3836 return
3837 }
3838 c := s.expr(cond)
3839 b := s.endBlock()
3840 b.Kind = ssa.BlockIf
3841 b.SetControl(c)
3842 b.Likely = ssa.BranchPrediction(likely)
3843 b.AddEdgeTo(yes)
3844 b.AddEdgeTo(no)
3845 }
3846
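// skipMask selects components of a slice assignment (pointer, length,
// capacity) that can be skipped because they are known not to change.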
3847 type skipMask uint8
3848
3849 const (
3850 skipPtr skipMask = 1 << iota
3851 skipLen
3852 skipCap
3853 )
3854
3855 // assign does left = right.
3856 // Right has already been evaluated to ssa, left has not.
3857 // If deref is true, then we do left = *right instead (and right has already been nil-checked).
3858 // If deref is true and right == nil, just do left = 0.
3859 // skip indicates assignments (at the top level) that can be avoided.
3860 // mayOverlap indicates whether left&right might partially overlap in memory. Default is false.
3861 func (s *state) assign(left ir.Node, right *ssa.Value, deref bool, skip skipMask) {
3862 s.assignWhichMayOverlap(left, right, deref, skip, false)
3863 }
3864 func (s *state) assignWhichMayOverlap(left ir.Node, right *ssa.Value, deref bool, skip skipMask, mayOverlap bool) {
3865 if left.Op() == ir.ONAME && ir.IsBlank(left) {
3866 return
3867 }
3868 t := left.Type()
3869 types.CalcSize(t)
3870 if s.canSSA(left) {
3871 if deref {
3872 s.Fatalf("can SSA LHS %v but not RHS %s", left, right)
3873 }
3874 if left.Op() == ir.ODOT {
3875 // We're assigning to a field of an ssa-able value.
3876 // We need to build a new structure with the new value for the
3877 // field we're assigning and the old values for the other fields.
3878 // For instance:
3879 //   type T struct {a, b, c int}
3880 //   var x T
3881 //   x.b = 5
3882 // For the x.b = 5 assignment we want to generate x = T{x.a, 5, x.c}
3883
3884 // Grab information about the structure type.
3885 left := left.(*ir.SelectorExpr)
3886 t := left.X.Type()
3887 nf := t.NumFields()
3888 idx := fieldIdx(left)
3889
3890 // Grab the current value of the structure.
3891 old := s.expr(left.X)
3892
3893 // Make a new structure.
3894 new := s.newValue0(ssa.StructMakeOp(t.NumFields()), t)
3895
3896 // Add fields as args.
3897 for i := 0; i < nf; i++ {
3898 if i == idx {
3899 new.AddArg(right)
3900 } else {
3901 new.AddArg(s.newValue1I(ssa.OpStructSelect, t.FieldType(i), int64(i), old))
3902 }
3903 }
3904
3905 // Recursively assign the new value we've made to the base of the dot op.
3906 s.assign(left.X, new, false, 0)
3907
3908 return
3909 }
3910 if left.Op() == ir.OINDEX && left.(*ir.IndexExpr).X.Type().IsArray() {
3911 left := left.(*ir.IndexExpr)
3912 s.pushLine(left.Pos())
3913 defer s.popLine()
3914 // We're assigning to an element of an ssa-able array.
3915 // a[i] = v
3916 t := left.X.Type()
3917 n := t.NumElem()
3918
3919 i := s.expr(left.Index)
3920 if n == 0 {
3921 // The bounds check must fail. Might as well
3922 // ignore the actual index and just use zeros.
3923 z := s.constInt(types.Types[types.TINT], 0)
3924 s.boundsCheck(z, z, ssa.BoundsIndex, false)
3925 return
3926 }
3927 if n != 1 {
3928 s.Fatalf("assigning to non-1-length array")
3929 }
3930 // Rewrite to a = [1]{v}
3931 len := s.constInt(types.Types[types.TINT], 1)
3932 s.boundsCheck(i, len, ssa.BoundsIndex, false)
3933 v := s.newValue1(ssa.OpArrayMake1, t, right)
3934 s.assign(left.X, v, false, 0)
3935 return
3936 }
3937 left := left.(*ir.Name)
3938 // Update variable assignment.
3939 s.vars[left] = right
3940 s.addNamedValue(left, right)
3941 return
3942 }
3943
3944 // If this assignment clobbers an entire stack-allocated variable, emit
3945 // OpVarDef so liveness analysis knows the old contents are dead.
3946 if base, ok := clobberBase(left).(*ir.Name); ok && base.OnStack() && skip == 0 && t.HasPointers() {
3947 s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, base, s.mem(), !ir.IsAutoTmp(base))
3948 }
3949
3950 // Left is not ssa-able. Compute its address.
3951 addr := s.addr(left)
3952 if ir.IsReflectHeaderDataField(left) {
3953 // Package unsafe's documentation says storing pointers into
3954 // reflect.SliceHeader and reflect.StringHeader's Data fields
3955 // is valid, even though they have type uintptr (#19168).
3956 // Mark it pointer type to signal the writebarrier pass to
3957 // insert a write barrier.
3958 t = types.Types[types.TUNSAFEPTR]
3959 }
3960 if deref {
3961 // Treat as a mem->mem move.
3962 if right == nil {
3963 s.zero(t, addr)
3964 } else {
3965 s.moveWhichMayOverlap(t, addr, right, mayOverlap)
3966 }
3967 return
3968 }
3969 // Treat as a store.
3970 s.storeType(t, addr, right, skip, !ir.IsAutoTmp(left))
3971 }
3972
3973 // zeroVal returns the zero value for type t.
3974 func (s *state) zeroVal(t *types.Type) *ssa.Value {
3975 switch {
3976 case t.IsInteger():
3977 switch t.Size() {
3978 case 1:
3979 return s.constInt8(t, 0)
3980 case 2:
3981 return s.constInt16(t, 0)
3982 case 4:
3983 return s.constInt32(t, 0)
3984 case 8:
3985 return s.constInt64(t, 0)
3986 default:
3987 s.Fatalf("bad sized integer type %v", t)
3988 }
3989 case t.IsFloat():
3990 switch t.Size() {
3991 case 4:
3992 return s.constFloat32(t, 0)
3993 case 8:
3994 return s.constFloat64(t, 0)
3995 default:
3996 s.Fatalf("bad sized float type %v", t)
3997 }
3998 case t.IsComplex():
3999 switch t.Size() {
4000 case 8:
4001 z := s.constFloat32(types.Types[types.TFLOAT32], 0)
4002 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4003 case 16:
4004 z := s.constFloat64(types.Types[types.TFLOAT64], 0)
4005 return s.entryNewValue2(ssa.OpComplexMake, t, z, z)
4006 default:
4007 s.Fatalf("bad sized complex type %v", t)
4008 }
4009
4010 case t.IsString():
4011 return s.constEmptyString(t)
4012 case t.IsPtrShaped():
4013 return s.constNil(t)
4014 case t.IsBoolean():
4015 return s.constBool(false)
4016 case t.IsInterface():
4017 return s.constInterface(t)
4018 case t.IsSlice():
4019 return s.constSlice(t)
4020 case t.IsStruct():
4021 n := t.NumFields()
4022 v := s.entryNewValue0(ssa.StructMakeOp(t.NumFields()), t)
4023 for i := 0; i < n; i++ {
4024 v.AddArg(s.zeroVal(t.FieldType(i)))
4025 }
4026 return v
4027 case t.IsArray():
4028 switch t.NumElem() {
4029 case 0:
4030 return s.entryNewValue0(ssa.OpArrayMake0, t)
4031 case 1:
4032 return s.entryNewValue1(ssa.OpArrayMake1, t, s.zeroVal(t.Elem()))
4033 }
4034 }
4035 s.Fatalf("zero for type %v not implemented", t)
4036 return nil
4037 }
4038
4039 type callKind int8
4040
4041 const (
4042 callNormal callKind = iota
4043 callDefer
4044 callDeferStack
4045 callGo
4046 callTail
4047 )
4048
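// sfRtCallDef describes the runtime helper implementing a floating-point op
// under softfloat: the function symbol to call and the kind of its result.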
4049 type sfRtCallDef struct {
4050 rtfn *obj.LSym
4051 rtype types.Kind
4052 }
4053
4054 var softFloatOps map[ssa.Op]sfRtCallDef
4055
4056 func softfloatInit() {
4057
4058 softFloatOps = map[ssa.Op]sfRtCallDef{
4059 ssa.OpAdd32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
4060 ssa.OpAdd64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
4061 ssa.OpSub32F: {typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
4062 ssa.OpSub64F: {typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
4063 ssa.OpMul32F: {typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
4064 ssa.OpMul64F: {typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
4065 ssa.OpDiv32F: {typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
4066 ssa.OpDiv64F: {typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},
4067
4068 ssa.OpEq64F: {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
4069 ssa.OpEq32F: {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
4070 ssa.OpNeq64F: {typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
4071 ssa.OpNeq32F: {typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
4072 ssa.OpLess64F: {typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
4073 ssa.OpLess32F: {typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
4074 ssa.OpLeq64F: {typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
4075 ssa.OpLeq32F: {typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},
4076
4077 ssa.OpCvt32to32F: {typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
4078 ssa.OpCvt32Fto32: {typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
4079 ssa.OpCvt64to32F: {typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
4080 ssa.OpCvt32Fto64: {typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
4081 ssa.OpCvt64Uto32F: {typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
4082 ssa.OpCvt32Fto64U: {typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
4083 ssa.OpCvt32to64F: {typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
4084 ssa.OpCvt64Fto32: {typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
4085 ssa.OpCvt64to64F: {typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
4086 ssa.OpCvt64Fto64: {typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
4087 ssa.OpCvt64Uto64F: {typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
4088 ssa.OpCvt64Fto64U: {typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
4089 ssa.OpCvt32Fto64F: {typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
4090 ssa.OpCvt64Fto32F: {typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
4091 }
4092 }
4093
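// sfcall lowers the floating-point operation op to a call into the
// soft-float runtime. It returns the resulting value and reports
// whether op has a soft-float implementation.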
4096 func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
4097 f2i := func(t *types.Type) *types.Type {
4098 switch t.Kind() {
4099 case types.TFLOAT32:
4100 return types.Types[types.TUINT32]
4101 case types.TFLOAT64:
4102 return types.Types[types.TUINT64]
4103 }
4104 return t
4105 }
4106
4107 if callDef, ok := softFloatOps[op]; ok {
4108 switch op {
4109 case ssa.OpLess32F,
4110 ssa.OpLess64F,
4111 ssa.OpLeq32F,
4112 ssa.OpLeq64F:
4113 args[0], args[1] = args[1], args[0]
4114 case ssa.OpSub32F,
4115 ssa.OpSub64F:
4116 args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
4117 }
4118
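// The soft-float runtime functions take and return unsigned integers
// in place of floats, so convert the arguments to the matching integer
// types to use the right calling convention.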
4121 for i, a := range args {
4122 if a.Type.IsFloat() {
4123 args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
4124 }
4125 }
4126
4127 rt := types.Types[callDef.rtype]
4128 result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
4129 if rt.IsFloat() {
4130 result = s.newValue1(ssa.OpCopy, rt, result)
4131 }
4132 if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
4133 result = s.newValue1(ssa.OpNot, result.Type, result)
4134 }
4135 return result, true
4136 }
4137 return nil, false
4138 }
4139
4140 var intrinsics map[intrinsicKey]intrinsicBuilder
4141
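// An intrinsicBuilder converts a call node n into an ssa value that
// implements that call as an intrinsic. args is a list of arguments to the func.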
4144 type intrinsicBuilder func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value
4145
4146 type intrinsicKey struct {
4147 arch *sys.Arch
4148 pkg string
4149 fn string
4150 }
4151
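// InitTables initializes the intrinsics table: calls to the functions
// recorded here are replaced by the SSA operations their builders emit.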
4152 func InitTables() {
4153 intrinsics = map[intrinsicKey]intrinsicBuilder{}
4154
4155 var all []*sys.Arch
var p4 []*sys.Arch        // architectures with 4-byte pointers
var p8 []*sys.Arch        // architectures with 8-byte pointers
var lwatomics []*sys.Arch // architectures using plain atomics for acquire/release (all but PPC64)
4159 for _, a := range &sys.Archs {
4160 all = append(all, a)
4161 if a.PtrSize == 4 {
4162 p4 = append(p4, a)
4163 } else {
4164 p8 = append(p8, a)
4165 }
4166 if a.Family != sys.PPC64 {
4167 lwatomics = append(lwatomics, a)
4168 }
4169 }
4170
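// add defines the intrinsic b for pkg.fn for the given list of architectures.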
4172 add := func(pkg, fn string, b intrinsicBuilder, archs ...*sys.Arch) {
4173 for _, a := range archs {
4174 intrinsics[intrinsicKey{a, pkg, fn}] = b
4175 }
4176 }
4177
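// addF does the same as add, but operates on architecture families.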
4178 addF := func(pkg, fn string, b intrinsicBuilder, archFamilies ...sys.ArchFamily) {
4179 m := 0
4180 for _, f := range archFamilies {
4181 if f >= 32 {
4182 panic("too many architecture families")
4183 }
4184 m |= 1 << uint(f)
4185 }
4186 for _, a := range all {
4187 if m>>uint(a.Family)&1 != 0 {
4188 intrinsics[intrinsicKey{a, pkg, fn}] = b
4189 }
4190 }
4191 }
4192
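// alias defines pkg.fn = pkg2.fn2 for all architectures in archs for
// which pkg2.fn2 exists; e.g. math/bits.Mul below is aliased to the
// math/bits.Mul64 intrinsic on the 8-byte-pointer architectures.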
4193 alias := func(pkg, fn, pkg2, fn2 string, archs ...*sys.Arch) {
4194 aliased := false
4195 for _, a := range archs {
4196 if b, ok := intrinsics[intrinsicKey{a, pkg2, fn2}]; ok {
4197 intrinsics[intrinsicKey{a, pkg, fn}] = b
4198 aliased = true
4199 }
4200 }
4201 if !aliased {
4202 panic(fmt.Sprintf("attempted to alias undefined intrinsic: %s.%s", pkg, fn))
4203 }
4204 }
4205
4206
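/******** runtime ********/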
4207 if !base.Flag.Cfg.Instrumenting {
4208 add("runtime", "slicebytetostringtmp",
4209 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
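// Compiler frontend optimizations emit OBYTES2STRTMP nodes
// for the backend instead of slicebytetostringtmp calls
// when not instrumenting.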
4213 return s.newValue2(ssa.OpStringMake, n.Type(), args[0], args[1])
4214 },
4215 all...)
4216 }
4217 addF("runtime/internal/math", "MulUintptr",
4218 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4219 if s.config.PtrSize == 4 {
4220 return s.newValue2(ssa.OpMul32uover, types.NewTuple(types.Types[types.TUINT], types.Types[types.TUINT]), args[0], args[1])
4221 }
4222 return s.newValue2(ssa.OpMul64uover, types.NewTuple(types.Types[types.TUINT], types.Types[types.TUINT]), args[0], args[1])
4223 },
4224 sys.AMD64, sys.I386, sys.Loong64, sys.MIPS64, sys.RISCV64, sys.ARM64)
4225 add("runtime", "KeepAlive",
4226 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4227 data := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, args[0])
4228 s.vars[memVar] = s.newValue2(ssa.OpKeepAlive, types.TypeMem, data, s.mem())
4229 return nil
4230 },
4231 all...)
4232 add("runtime", "getclosureptr",
4233 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4234 return s.newValue0(ssa.OpGetClosurePtr, s.f.Config.Types.Uintptr)
4235 },
4236 all...)
4237
4238 add("runtime", "getcallerpc",
4239 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4240 return s.newValue0(ssa.OpGetCallerPC, s.f.Config.Types.Uintptr)
4241 },
4242 all...)
4243
4244 add("runtime", "getcallersp",
4245 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4246 return s.newValue1(ssa.OpGetCallerSP, s.f.Config.Types.Uintptr, s.mem())
4247 },
4248 all...)
4249
4250 addF("runtime", "publicationBarrier",
4251 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4252 s.vars[memVar] = s.newValue1(ssa.OpPubBarrier, types.TypeMem, s.mem())
4253 return nil
4254 },
4255 sys.ARM64, sys.PPC64, sys.RISCV64)
4256
4257 brev_arch := []sys.ArchFamily{sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X}
4258 if buildcfg.GOPPC64 >= 10 {
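// Use this intrinsic only on Power10, where the new byte reverse
// instructions make it worthwhile.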
4261 brev_arch = append(brev_arch, sys.PPC64)
4262 }
4263
4264 addF("runtime/internal/sys", "Bswap32",
4265 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4266 return s.newValue1(ssa.OpBswap32, types.Types[types.TUINT32], args[0])
4267 },
4268 brev_arch...)
4269 addF("runtime/internal/sys", "Bswap64",
4270 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4271 return s.newValue1(ssa.OpBswap64, types.Types[types.TUINT64], args[0])
4272 },
4273 brev_arch...)
4274
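// makePrefetchFunc returns an intrinsic builder that emits the given
// prefetch op for its address argument.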
4276 makePrefetchFunc := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4277 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4278 s.vars[memVar] = s.newValue2(op, types.TypeMem, args[0], s.mem())
4279 return nil
4280 }
4281 }
4282
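// Prefetch intrinsics for supported platforms; on unsupported
// platforms the stub functions are eliminated instead.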
4285 addF("runtime/internal/sys", "Prefetch", makePrefetchFunc(ssa.OpPrefetchCache),
4286 sys.AMD64, sys.ARM64, sys.PPC64)
4287 addF("runtime/internal/sys", "PrefetchStreamed", makePrefetchFunc(ssa.OpPrefetchCacheStreamed),
4288 sys.AMD64, sys.ARM64, sys.PPC64)
4289
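/******** runtime/internal/atomic ********/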
4291 addF("runtime/internal/atomic", "Load",
4292 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4293 v := s.newValue2(ssa.OpAtomicLoad32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], s.mem())
4294 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4295 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4296 },
4297 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4298 addF("runtime/internal/atomic", "Load8",
4299 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4300 v := s.newValue2(ssa.OpAtomicLoad8, types.NewTuple(types.Types[types.TUINT8], types.TypeMem), args[0], s.mem())
4301 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4302 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT8], v)
4303 },
4304 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4305 addF("runtime/internal/atomic", "Load64",
4306 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4307 v := s.newValue2(ssa.OpAtomicLoad64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], s.mem())
4308 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4309 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4310 },
4311 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4312 addF("runtime/internal/atomic", "LoadAcq",
4313 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4314 v := s.newValue2(ssa.OpAtomicLoadAcq32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], s.mem())
4315 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4316 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4317 },
4318 sys.PPC64, sys.S390X)
4319 addF("runtime/internal/atomic", "LoadAcq64",
4320 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4321 v := s.newValue2(ssa.OpAtomicLoadAcq64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], s.mem())
4322 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4323 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4324 },
4325 sys.PPC64)
4326 addF("runtime/internal/atomic", "Loadp",
4327 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4328 v := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(s.f.Config.Types.BytePtr, types.TypeMem), args[0], s.mem())
4329 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4330 return s.newValue1(ssa.OpSelect0, s.f.Config.Types.BytePtr, v)
4331 },
4332 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4333
4334 addF("runtime/internal/atomic", "Store",
4335 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4336 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore32, types.TypeMem, args[0], args[1], s.mem())
4337 return nil
4338 },
4339 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4340 addF("runtime/internal/atomic", "Store8",
4341 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4342 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore8, types.TypeMem, args[0], args[1], s.mem())
4343 return nil
4344 },
4345 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4346 addF("runtime/internal/atomic", "Store64",
4347 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4348 s.vars[memVar] = s.newValue3(ssa.OpAtomicStore64, types.TypeMem, args[0], args[1], s.mem())
4349 return nil
4350 },
4351 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4352 addF("runtime/internal/atomic", "StorepNoWB",
4353 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4354 s.vars[memVar] = s.newValue3(ssa.OpAtomicStorePtrNoWB, types.TypeMem, args[0], args[1], s.mem())
4355 return nil
4356 },
4357 sys.AMD64, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.RISCV64, sys.S390X)
4358 addF("runtime/internal/atomic", "StoreRel",
4359 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4360 s.vars[memVar] = s.newValue3(ssa.OpAtomicStoreRel32, types.TypeMem, args[0], args[1], s.mem())
4361 return nil
4362 },
4363 sys.PPC64, sys.S390X)
4364 addF("runtime/internal/atomic", "StoreRel64",
4365 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4366 s.vars[memVar] = s.newValue3(ssa.OpAtomicStoreRel64, types.TypeMem, args[0], args[1], s.mem())
4367 return nil
4368 },
4369 sys.PPC64)
4370
4371 addF("runtime/internal/atomic", "Xchg",
4372 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4373 v := s.newValue3(ssa.OpAtomicExchange32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], args[1], s.mem())
4374 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4375 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4376 },
4377 sys.AMD64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4378 addF("runtime/internal/atomic", "Xchg64",
4379 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4380 v := s.newValue3(ssa.OpAtomicExchange64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], args[1], s.mem())
4381 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4382 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4383 },
4384 sys.AMD64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4385
4386 type atomicOpEmitter func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind)
4387
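// makeAtomicGuardedIntrinsicARM64 wraps an atomic intrinsic in a
// runtime check of ARM64HasATOMICS, emitting the LSE variant op1 when
// the feature is present and the LL/SC sequence op0 otherwise. rtyp is
// the result kind, or TNIL if the operation has no result.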
4388 makeAtomicGuardedIntrinsicARM64 := func(op0, op1 ssa.Op, typ, rtyp types.Kind, emit atomicOpEmitter) intrinsicBuilder {
4390 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
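// The target atomic feature is identified by dynamic detection.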
4392 addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARM64HasATOMICS, s.sb)
4393 v := s.load(types.Types[types.TBOOL], addr)
4394 b := s.endBlock()
4395 b.Kind = ssa.BlockIf
4396 b.SetControl(v)
4397 bTrue := s.f.NewBlock(ssa.BlockPlain)
4398 bFalse := s.f.NewBlock(ssa.BlockPlain)
4399 bEnd := s.f.NewBlock(ssa.BlockPlain)
4400 b.AddEdgeTo(bTrue)
4401 b.AddEdgeTo(bFalse)
4402 b.Likely = ssa.BranchLikely
4403
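// We have the atomic instructions - use them directly.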
4405 s.startBlock(bTrue)
4406 emit(s, n, args, op1, typ)
4407 s.endBlock().AddEdgeTo(bEnd)
4408
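// Use the original (non-LSE) instruction sequence.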
4410 s.startBlock(bFalse)
4411 emit(s, n, args, op0, typ)
4412 s.endBlock().AddEdgeTo(bEnd)
4413
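// Merge results.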
4415 s.startBlock(bEnd)
4416 if rtyp == types.TNIL {
4417 return nil
4418 } else {
4419 return s.variable(n, types.Types[rtyp])
4420 }
4421 }
4422 }
4423
4424 atomicXchgXaddEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
4425 v := s.newValue3(op, types.NewTuple(types.Types[typ], types.TypeMem), args[0], args[1], s.mem())
4426 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4427 s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
4428 }
4429 addF("runtime/internal/atomic", "Xchg",
4430 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicExchange32, ssa.OpAtomicExchange32Variant, types.TUINT32, types.TUINT32, atomicXchgXaddEmitterARM64),
4431 sys.ARM64)
4432 addF("runtime/internal/atomic", "Xchg64",
4433 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicExchange64, ssa.OpAtomicExchange64Variant, types.TUINT64, types.TUINT64, atomicXchgXaddEmitterARM64),
4434 sys.ARM64)
4435
4436 addF("runtime/internal/atomic", "Xadd",
4437 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4438 v := s.newValue3(ssa.OpAtomicAdd32, types.NewTuple(types.Types[types.TUINT32], types.TypeMem), args[0], args[1], s.mem())
4439 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4440 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT32], v)
4441 },
4442 sys.AMD64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4443 addF("runtime/internal/atomic", "Xadd64",
4444 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4445 v := s.newValue3(ssa.OpAtomicAdd64, types.NewTuple(types.Types[types.TUINT64], types.TypeMem), args[0], args[1], s.mem())
4446 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4447 return s.newValue1(ssa.OpSelect0, types.Types[types.TUINT64], v)
4448 },
4449 sys.AMD64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4450
4451 addF("runtime/internal/atomic", "Xadd",
4452 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAdd32, ssa.OpAtomicAdd32Variant, types.TUINT32, types.TUINT32, atomicXchgXaddEmitterARM64),
4453 sys.ARM64)
4454 addF("runtime/internal/atomic", "Xadd64",
4455 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAdd64, ssa.OpAtomicAdd64Variant, types.TUINT64, types.TUINT64, atomicXchgXaddEmitterARM64),
4456 sys.ARM64)
4457
4458 addF("runtime/internal/atomic", "Cas",
4459 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4460 v := s.newValue4(ssa.OpAtomicCompareAndSwap32, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4461 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4462 return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
4463 },
4464 sys.AMD64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4465 addF("runtime/internal/atomic", "Cas64",
4466 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4467 v := s.newValue4(ssa.OpAtomicCompareAndSwap64, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4468 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4469 return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
4470 },
4471 sys.AMD64, sys.Loong64, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4472 addF("runtime/internal/atomic", "CasRel",
4473 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4474 v := s.newValue4(ssa.OpAtomicCompareAndSwap32, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4475 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4476 return s.newValue1(ssa.OpSelect0, types.Types[types.TBOOL], v)
4477 },
4478 sys.PPC64)
4479
4480 atomicCasEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
4481 v := s.newValue4(op, types.NewTuple(types.Types[types.TBOOL], types.TypeMem), args[0], args[1], args[2], s.mem())
4482 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, v)
4483 s.vars[n] = s.newValue1(ssa.OpSelect0, types.Types[typ], v)
4484 }
4485
4486 addF("runtime/internal/atomic", "Cas",
4487 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicCompareAndSwap32, ssa.OpAtomicCompareAndSwap32Variant, types.TUINT32, types.TBOOL, atomicCasEmitterARM64),
4488 sys.ARM64)
4489 addF("runtime/internal/atomic", "Cas64",
4490 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicCompareAndSwap64, ssa.OpAtomicCompareAndSwap64Variant, types.TUINT64, types.TBOOL, atomicCasEmitterARM64),
4491 sys.ARM64)
4492
4493 addF("runtime/internal/atomic", "And8",
4494 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4495 s.vars[memVar] = s.newValue3(ssa.OpAtomicAnd8, types.TypeMem, args[0], args[1], s.mem())
4496 return nil
4497 },
4498 sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4499 addF("runtime/internal/atomic", "And",
4500 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4501 s.vars[memVar] = s.newValue3(ssa.OpAtomicAnd32, types.TypeMem, args[0], args[1], s.mem())
4502 return nil
4503 },
4504 sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4505 addF("runtime/internal/atomic", "Or8",
4506 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4507 s.vars[memVar] = s.newValue3(ssa.OpAtomicOr8, types.TypeMem, args[0], args[1], s.mem())
4508 return nil
4509 },
4510 sys.AMD64, sys.ARM64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4511 addF("runtime/internal/atomic", "Or",
4512 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4513 s.vars[memVar] = s.newValue3(ssa.OpAtomicOr32, types.TypeMem, args[0], args[1], s.mem())
4514 return nil
4515 },
4516 sys.AMD64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X)
4517
4518 atomicAndOrEmitterARM64 := func(s *state, n *ir.CallExpr, args []*ssa.Value, op ssa.Op, typ types.Kind) {
4519 s.vars[memVar] = s.newValue3(op, types.TypeMem, args[0], args[1], s.mem())
4520 }
4521
4522 addF("runtime/internal/atomic", "And8",
4523 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd8, ssa.OpAtomicAnd8Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
4524 sys.ARM64)
4525 addF("runtime/internal/atomic", "And",
4526 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicAnd32, ssa.OpAtomicAnd32Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
4527 sys.ARM64)
4528 addF("runtime/internal/atomic", "Or8",
4529 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr8, ssa.OpAtomicOr8Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
4530 sys.ARM64)
4531 addF("runtime/internal/atomic", "Or",
4532 makeAtomicGuardedIntrinsicARM64(ssa.OpAtomicOr32, ssa.OpAtomicOr32Variant, types.TNIL, types.TNIL, atomicAndOrEmitterARM64),
4533 sys.ARM64)
4534
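// Aliases for atomic load operations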
4536 alias("runtime/internal/atomic", "Loadint32", "runtime/internal/atomic", "Load", all...)
4537 alias("runtime/internal/atomic", "Loadint64", "runtime/internal/atomic", "Load64", all...)
4538 alias("runtime/internal/atomic", "Loaduintptr", "runtime/internal/atomic", "Load", p4...)
4539 alias("runtime/internal/atomic", "Loaduintptr", "runtime/internal/atomic", "Load64", p8...)
4540 alias("runtime/internal/atomic", "Loaduint", "runtime/internal/atomic", "Load", p4...)
4541 alias("runtime/internal/atomic", "Loaduint", "runtime/internal/atomic", "Load64", p8...)
4542 alias("runtime/internal/atomic", "LoadAcq", "runtime/internal/atomic", "Load", lwatomics...)
4543 alias("runtime/internal/atomic", "LoadAcq64", "runtime/internal/atomic", "Load64", lwatomics...)
4544 alias("runtime/internal/atomic", "LoadAcquintptr", "runtime/internal/atomic", "LoadAcq", p4...)
4545 alias("sync", "runtime_LoadAcquintptr", "runtime/internal/atomic", "LoadAcq", p4...)
4546 alias("runtime/internal/atomic", "LoadAcquintptr", "runtime/internal/atomic", "LoadAcq64", p8...)
4547 alias("sync", "runtime_LoadAcquintptr", "runtime/internal/atomic", "LoadAcq64", p8...)
4548
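// Aliases for atomic store operations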
4550 alias("runtime/internal/atomic", "Storeint32", "runtime/internal/atomic", "Store", all...)
4551 alias("runtime/internal/atomic", "Storeint64", "runtime/internal/atomic", "Store64", all...)
4552 alias("runtime/internal/atomic", "Storeuintptr", "runtime/internal/atomic", "Store", p4...)
4553 alias("runtime/internal/atomic", "Storeuintptr", "runtime/internal/atomic", "Store64", p8...)
4554 alias("runtime/internal/atomic", "StoreRel", "runtime/internal/atomic", "Store", lwatomics...)
4555 alias("runtime/internal/atomic", "StoreRel64", "runtime/internal/atomic", "Store64", lwatomics...)
4556 alias("runtime/internal/atomic", "StoreReluintptr", "runtime/internal/atomic", "StoreRel", p4...)
4557 alias("sync", "runtime_StoreReluintptr", "runtime/internal/atomic", "StoreRel", p4...)
4558 alias("runtime/internal/atomic", "StoreReluintptr", "runtime/internal/atomic", "StoreRel64", p8...)
4559 alias("sync", "runtime_StoreReluintptr", "runtime/internal/atomic", "StoreRel64", p8...)
4560
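// Aliases for atomic swap operations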
4562 alias("runtime/internal/atomic", "Xchgint32", "runtime/internal/atomic", "Xchg", all...)
4563 alias("runtime/internal/atomic", "Xchgint64", "runtime/internal/atomic", "Xchg64", all...)
4564 alias("runtime/internal/atomic", "Xchguintptr", "runtime/internal/atomic", "Xchg", p4...)
4565 alias("runtime/internal/atomic", "Xchguintptr", "runtime/internal/atomic", "Xchg64", p8...)
4566
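// Aliases for atomic add operations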
4568 alias("runtime/internal/atomic", "Xaddint32", "runtime/internal/atomic", "Xadd", all...)
4569 alias("runtime/internal/atomic", "Xaddint64", "runtime/internal/atomic", "Xadd64", all...)
4570 alias("runtime/internal/atomic", "Xadduintptr", "runtime/internal/atomic", "Xadd", p4...)
4571 alias("runtime/internal/atomic", "Xadduintptr", "runtime/internal/atomic", "Xadd64", p8...)
4572
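// Aliases for atomic CAS operations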
4574 alias("runtime/internal/atomic", "Casint32", "runtime/internal/atomic", "Cas", all...)
4575 alias("runtime/internal/atomic", "Casint64", "runtime/internal/atomic", "Cas64", all...)
4576 alias("runtime/internal/atomic", "Casuintptr", "runtime/internal/atomic", "Cas", p4...)
4577 alias("runtime/internal/atomic", "Casuintptr", "runtime/internal/atomic", "Cas64", p8...)
4578 alias("runtime/internal/atomic", "Casp1", "runtime/internal/atomic", "Cas", p4...)
4579 alias("runtime/internal/atomic", "Casp1", "runtime/internal/atomic", "Cas64", p8...)
4580 alias("runtime/internal/atomic", "CasRel", "runtime/internal/atomic", "Cas", lwatomics...)
4581
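/******** math ********/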
4583 addF("math", "sqrt",
4584 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4585 return s.newValue1(ssa.OpSqrt, types.Types[types.TFLOAT64], args[0])
4586 },
4587 sys.I386, sys.AMD64, sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm)
4588 addF("math", "Trunc",
4589 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4590 return s.newValue1(ssa.OpTrunc, types.Types[types.TFLOAT64], args[0])
4591 },
4592 sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
4593 addF("math", "Ceil",
4594 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4595 return s.newValue1(ssa.OpCeil, types.Types[types.TFLOAT64], args[0])
4596 },
4597 sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
4598 addF("math", "Floor",
4599 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4600 return s.newValue1(ssa.OpFloor, types.Types[types.TFLOAT64], args[0])
4601 },
4602 sys.ARM64, sys.PPC64, sys.S390X, sys.Wasm)
4603 addF("math", "Round",
4604 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4605 return s.newValue1(ssa.OpRound, types.Types[types.TFLOAT64], args[0])
4606 },
4607 sys.ARM64, sys.PPC64, sys.S390X)
4608 addF("math", "RoundToEven",
4609 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4610 return s.newValue1(ssa.OpRoundToEven, types.Types[types.TFLOAT64], args[0])
4611 },
4612 sys.ARM64, sys.S390X, sys.Wasm)
4613 addF("math", "Abs",
4614 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4615 return s.newValue1(ssa.OpAbs, types.Types[types.TFLOAT64], args[0])
4616 },
4617 sys.ARM64, sys.ARM, sys.PPC64, sys.RISCV64, sys.Wasm, sys.MIPS, sys.MIPS64)
4618 addF("math", "Copysign",
4619 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4620 return s.newValue2(ssa.OpCopysign, types.Types[types.TFLOAT64], args[0], args[1])
4621 },
4622 sys.PPC64, sys.RISCV64, sys.Wasm)
4623 addF("math", "FMA",
4624 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4625 return s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4626 },
4627 sys.ARM64, sys.PPC64, sys.RISCV64, sys.S390X)
4628 addF("math", "FMA",
4629 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4630 if !s.config.UseFMA {
4631 s.vars[n] = s.callResult(n, callNormal)
4632 return s.variable(n, types.Types[types.TFLOAT64])
4633 }
4634
4635 if buildcfg.GOAMD64 >= 3 {
4636 return s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4637 }
4638
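// Check for hardware FMA support at run time.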
4639 v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasFMA)
4640 b := s.endBlock()
4641 b.Kind = ssa.BlockIf
4642 b.SetControl(v)
4643 bTrue := s.f.NewBlock(ssa.BlockPlain)
4644 bFalse := s.f.NewBlock(ssa.BlockPlain)
4645 bEnd := s.f.NewBlock(ssa.BlockPlain)
4646 b.AddEdgeTo(bTrue)
4647 b.AddEdgeTo(bFalse)
4648 b.Likely = ssa.BranchLikely
4649
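// We have the intrinsic - use it directly.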
4651 s.startBlock(bTrue)
4652 s.vars[n] = s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4653 s.endBlock().AddEdgeTo(bEnd)
4654
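// Call the pure Go version.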
4656 s.startBlock(bFalse)
4657 s.vars[n] = s.callResult(n, callNormal)
4658 s.endBlock().AddEdgeTo(bEnd)
4659
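// Merge results.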
4661 s.startBlock(bEnd)
4662 return s.variable(n, types.Types[types.TFLOAT64])
4663 },
4664 sys.AMD64)
4665 addF("math", "FMA",
4666 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4667 if !s.config.UseFMA {
4668 s.vars[n] = s.callResult(n, callNormal)
4669 return s.variable(n, types.Types[types.TFLOAT64])
4670 }
4671 addr := s.entryNewValue1A(ssa.OpAddr, types.Types[types.TBOOL].PtrTo(), ir.Syms.ARMHasVFPv4, s.sb)
4672 v := s.load(types.Types[types.TBOOL], addr)
4673 b := s.endBlock()
4674 b.Kind = ssa.BlockIf
4675 b.SetControl(v)
4676 bTrue := s.f.NewBlock(ssa.BlockPlain)
4677 bFalse := s.f.NewBlock(ssa.BlockPlain)
4678 bEnd := s.f.NewBlock(ssa.BlockPlain)
4679 b.AddEdgeTo(bTrue)
4680 b.AddEdgeTo(bFalse)
4681 b.Likely = ssa.BranchLikely
4682
4683
4684 s.startBlock(bTrue)
4685 s.vars[n] = s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
4686 s.endBlock().AddEdgeTo(bEnd)
4687
4688
4689 s.startBlock(bFalse)
4690 s.vars[n] = s.callResult(n, callNormal)
4691 s.endBlock().AddEdgeTo(bEnd)
4692
4693
4694 s.startBlock(bEnd)
4695 return s.variable(n, types.Types[types.TFLOAT64])
4696 },
4697 sys.ARM)
4698
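// makeRoundAMD64 returns an intrinsic builder for op. SSE4.1 rounding
// is guaranteed when GOAMD64 >= 2; otherwise the builder guards op
// with a run-time check of X86HasSSE41 and falls back to calling the
// pure Go version.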
4699 makeRoundAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4700 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4701 if buildcfg.GOAMD64 >= 2 {
4702 return s.newValue1(op, types.Types[types.TFLOAT64], args[0])
4703 }
4704
4705 v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasSSE41)
4706 b := s.endBlock()
4707 b.Kind = ssa.BlockIf
4708 b.SetControl(v)
4709 bTrue := s.f.NewBlock(ssa.BlockPlain)
4710 bFalse := s.f.NewBlock(ssa.BlockPlain)
4711 bEnd := s.f.NewBlock(ssa.BlockPlain)
4712 b.AddEdgeTo(bTrue)
4713 b.AddEdgeTo(bFalse)
4714 b.Likely = ssa.BranchLikely
4715
4716
4717 s.startBlock(bTrue)
4718 s.vars[n] = s.newValue1(op, types.Types[types.TFLOAT64], args[0])
4719 s.endBlock().AddEdgeTo(bEnd)
4720
4721
4722 s.startBlock(bFalse)
4723 s.vars[n] = s.callResult(n, callNormal)
4724 s.endBlock().AddEdgeTo(bEnd)
4725
4726
4727 s.startBlock(bEnd)
4728 return s.variable(n, types.Types[types.TFLOAT64])
4729 }
4730 }
4731 addF("math", "RoundToEven",
4732 makeRoundAMD64(ssa.OpRoundToEven),
4733 sys.AMD64)
4734 addF("math", "Floor",
4735 makeRoundAMD64(ssa.OpFloor),
4736 sys.AMD64)
4737 addF("math", "Ceil",
4738 makeRoundAMD64(ssa.OpCeil),
4739 sys.AMD64)
4740 addF("math", "Trunc",
4741 makeRoundAMD64(ssa.OpTrunc),
4742 sys.AMD64)
4743
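/******** math/bits ********/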
4745 addF("math/bits", "TrailingZeros64",
4746 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4747 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], args[0])
4748 },
4749 sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4750 addF("math/bits", "TrailingZeros32",
4751 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4752 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], args[0])
4753 },
4754 sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4755 addF("math/bits", "TrailingZeros16",
4756 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4757 x := s.newValue1(ssa.OpZeroExt16to32, types.Types[types.TUINT32], args[0])
4758 c := s.constInt32(types.Types[types.TUINT32], 1<<16)
4759 y := s.newValue2(ssa.OpOr32, types.Types[types.TUINT32], x, c)
4760 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], y)
4761 },
4762 sys.MIPS)
4763 addF("math/bits", "TrailingZeros16",
4764 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4765 return s.newValue1(ssa.OpCtz16, types.Types[types.TINT], args[0])
4766 },
4767 sys.AMD64, sys.I386, sys.ARM, sys.ARM64, sys.Wasm)
4768 addF("math/bits", "TrailingZeros16",
4769 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4770 x := s.newValue1(ssa.OpZeroExt16to64, types.Types[types.TUINT64], args[0])
4771 c := s.constInt64(types.Types[types.TUINT64], 1<<16)
4772 y := s.newValue2(ssa.OpOr64, types.Types[types.TUINT64], x, c)
4773 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], y)
4774 },
4775 sys.S390X, sys.PPC64)
4776 addF("math/bits", "TrailingZeros8",
4777 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4778 x := s.newValue1(ssa.OpZeroExt8to32, types.Types[types.TUINT32], args[0])
4779 c := s.constInt32(types.Types[types.TUINT32], 1<<8)
4780 y := s.newValue2(ssa.OpOr32, types.Types[types.TUINT32], x, c)
4781 return s.newValue1(ssa.OpCtz32, types.Types[types.TINT], y)
4782 },
4783 sys.MIPS)
4784 addF("math/bits", "TrailingZeros8",
4785 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4786 return s.newValue1(ssa.OpCtz8, types.Types[types.TINT], args[0])
4787 },
4788 sys.AMD64, sys.I386, sys.ARM, sys.ARM64, sys.Wasm)
4789 addF("math/bits", "TrailingZeros8",
4790 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4791 x := s.newValue1(ssa.OpZeroExt8to64, types.Types[types.TUINT64], args[0])
4792 c := s.constInt64(types.Types[types.TUINT64], 1<<8)
4793 y := s.newValue2(ssa.OpOr64, types.Types[types.TUINT64], x, c)
4794 return s.newValue1(ssa.OpCtz64, types.Types[types.TINT], y)
4795 },
4796 sys.S390X)
4797 alias("math/bits", "ReverseBytes64", "runtime/internal/sys", "Bswap64", all...)
4798 alias("math/bits", "ReverseBytes32", "runtime/internal/sys", "Bswap32", all...)
4799
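// ReverseBytes inlines correctly, so it is not intrinsified except on
// Power10, which has a dedicated 16-bit byte-reverse instruction.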
4802 if buildcfg.GOPPC64 >= 10 {
4803 addF("math/bits", "ReverseBytes16",
4804 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4805 return s.newValue1(ssa.OpBswap16, types.Types[types.TUINT], args[0])
4806 },
4807 sys.PPC64)
4808 }
4809
4810 addF("math/bits", "Len64",
4811 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4812 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], args[0])
4813 },
4814 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4815 addF("math/bits", "Len32",
4816 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4817 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
4818 },
4819 sys.AMD64, sys.ARM64, sys.PPC64)
4820 addF("math/bits", "Len32",
4821 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4822 if s.config.PtrSize == 4 {
4823 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
4824 }
4825 x := s.newValue1(ssa.OpZeroExt32to64, types.Types[types.TUINT64], args[0])
4826 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
4827 },
4828 sys.ARM, sys.S390X, sys.MIPS, sys.Wasm)
4829 addF("math/bits", "Len16",
4830 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4831 if s.config.PtrSize == 4 {
4832 x := s.newValue1(ssa.OpZeroExt16to32, types.Types[types.TUINT32], args[0])
4833 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], x)
4834 }
4835 x := s.newValue1(ssa.OpZeroExt16to64, types.Types[types.TUINT64], args[0])
4836 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
4837 },
4838 sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4839 addF("math/bits", "Len16",
4840 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4841 return s.newValue1(ssa.OpBitLen16, types.Types[types.TINT], args[0])
4842 },
4843 sys.AMD64)
4844 addF("math/bits", "Len8",
4845 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4846 if s.config.PtrSize == 4 {
4847 x := s.newValue1(ssa.OpZeroExt8to32, types.Types[types.TUINT32], args[0])
4848 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], x)
4849 }
4850 x := s.newValue1(ssa.OpZeroExt8to64, types.Types[types.TUINT64], args[0])
4851 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], x)
4852 },
4853 sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4854 addF("math/bits", "Len8",
4855 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4856 return s.newValue1(ssa.OpBitLen8, types.Types[types.TINT], args[0])
4857 },
4858 sys.AMD64)
4859 addF("math/bits", "Len",
4860 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4861 if s.config.PtrSize == 4 {
4862 return s.newValue1(ssa.OpBitLen32, types.Types[types.TINT], args[0])
4863 }
4864 return s.newValue1(ssa.OpBitLen64, types.Types[types.TINT], args[0])
4865 },
4866 sys.AMD64, sys.ARM64, sys.ARM, sys.S390X, sys.MIPS, sys.PPC64, sys.Wasm)
4867
4868 addF("math/bits", "Reverse64",
4869 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4870 return s.newValue1(ssa.OpBitRev64, types.Types[types.TINT], args[0])
4871 },
4872 sys.ARM64)
4873 addF("math/bits", "Reverse32",
4874 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4875 return s.newValue1(ssa.OpBitRev32, types.Types[types.TINT], args[0])
4876 },
4877 sys.ARM64)
4878 addF("math/bits", "Reverse16",
4879 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4880 return s.newValue1(ssa.OpBitRev16, types.Types[types.TINT], args[0])
4881 },
4882 sys.ARM64)
4883 addF("math/bits", "Reverse8",
4884 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4885 return s.newValue1(ssa.OpBitRev8, types.Types[types.TINT], args[0])
4886 },
4887 sys.ARM64)
4888 addF("math/bits", "Reverse",
4889 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4890 return s.newValue1(ssa.OpBitRev64, types.Types[types.TINT], args[0])
4891 },
4892 sys.ARM64)
4893 addF("math/bits", "RotateLeft8",
4894 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4895 return s.newValue2(ssa.OpRotateLeft8, types.Types[types.TUINT8], args[0], args[1])
4896 },
4897 sys.AMD64)
4898 addF("math/bits", "RotateLeft16",
4899 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4900 return s.newValue2(ssa.OpRotateLeft16, types.Types[types.TUINT16], args[0], args[1])
4901 },
4902 sys.AMD64)
4903 addF("math/bits", "RotateLeft32",
4904 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4905 return s.newValue2(ssa.OpRotateLeft32, types.Types[types.TUINT32], args[0], args[1])
4906 },
4907 sys.AMD64, sys.ARM, sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm, sys.Loong64)
4908 addF("math/bits", "RotateLeft64",
4909 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4910 return s.newValue2(ssa.OpRotateLeft64, types.Types[types.TUINT64], args[0], args[1])
4911 },
4912 sys.AMD64, sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm, sys.Loong64)
4913 alias("math/bits", "RotateLeft", "math/bits", "RotateLeft64", p8...)
4914
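// makeOnesCountAMD64 returns an intrinsic builder for op. POPCNT is
// guaranteed when GOAMD64 >= 2; otherwise the builder guards op with a
// run-time check of X86HasPOPCNT and falls back to calling the pure Go
// version.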
4915 makeOnesCountAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4916 return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4917 if buildcfg.GOAMD64 >= 2 {
4918 return s.newValue1(op, types.Types[types.TINT], args[0])
4919 }
4920
4921 v := s.entryNewValue0A(ssa.OpHasCPUFeature, types.Types[types.TBOOL], ir.Syms.X86HasPOPCNT)
4922 b := s.endBlock()
4923 b.Kind = ssa.BlockIf
4924 b.SetControl(v)
4925 bTrue := s.f.NewBlock(ssa.BlockPlain)
4926 bFalse := s.f.NewBlock(ssa.BlockPlain)
4927 bEnd := s.f.NewBlock(ssa.BlockPlain)
4928 b.AddEdgeTo(bTrue)
4929 b.AddEdgeTo(bFalse)
4930 b.Likely = ssa.BranchLikely
4931
4932
4933 s.startBlock(bTrue)
4934 s.vars[n] = s.newValue1(op, types.Types[types.TINT], args[0])
4935 s.endBlock().AddEdgeTo(bEnd)
4936
4937
4938 s.startBlock(bFalse)
4939 s.vars[n] = s.callResult(n, callNormal)
4940 s.endBlock().AddEdgeTo(bEnd)
4941
4942
4943 s.startBlock(bEnd)
4944 return s.variable(n, types.Types[types.TINT])
4945 }
4946 }
4947 addF("math/bits", "OnesCount64",
4948 makeOnesCountAMD64(ssa.OpPopCount64),
4949 sys.AMD64)
4950 addF("math/bits", "OnesCount64",
4951 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4952 return s.newValue1(ssa.OpPopCount64, types.Types[types.TINT], args[0])
4953 },
4954 sys.PPC64, sys.ARM64, sys.S390X, sys.Wasm)
4955 addF("math/bits", "OnesCount32",
4956 makeOnesCountAMD64(ssa.OpPopCount32),
4957 sys.AMD64)
4958 addF("math/bits", "OnesCount32",
4959 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4960 return s.newValue1(ssa.OpPopCount32, types.Types[types.TINT], args[0])
4961 },
4962 sys.PPC64, sys.ARM64, sys.S390X, sys.Wasm)
4963 addF("math/bits", "OnesCount16",
4964 makeOnesCountAMD64(ssa.OpPopCount16),
4965 sys.AMD64)
4966 addF("math/bits", "OnesCount16",
4967 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4968 return s.newValue1(ssa.OpPopCount16, types.Types[types.TINT], args[0])
4969 },
4970 sys.ARM64, sys.S390X, sys.PPC64, sys.Wasm)
4971 addF("math/bits", "OnesCount8",
4972 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4973 return s.newValue1(ssa.OpPopCount8, types.Types[types.TINT], args[0])
4974 },
4975 sys.S390X, sys.PPC64, sys.Wasm)
4976 addF("math/bits", "OnesCount",
4977 makeOnesCountAMD64(ssa.OpPopCount64),
4978 sys.AMD64)
4979 addF("math/bits", "Mul64",
4980 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4981 return s.newValue2(ssa.OpMul64uhilo, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1])
4982 },
4983 sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.MIPS64, sys.RISCV64, sys.Loong64)
4984 alias("math/bits", "Mul", "math/bits", "Mul64", p8...)
4985 alias("runtime/internal/math", "Mul64", "math/bits", "Mul64", p8...)
4986 addF("math/bits", "Add64",
4987 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4988 return s.newValue3(ssa.OpAdd64carry, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
4989 },
4990 sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.RISCV64, sys.Loong64, sys.MIPS64)
4991 alias("math/bits", "Add", "math/bits", "Add64", p8...)
4992 alias("runtime/internal/math", "Add64", "math/bits", "Add64", all...)
4993 addF("math/bits", "Sub64",
4994 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
4995 return s.newValue3(ssa.OpSub64borrow, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
4996 },
4997 sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.RISCV64, sys.Loong64, sys.MIPS64)
4998 alias("math/bits", "Sub", "math/bits", "Sub64", p8...)
4999 addF("math/bits", "Div64",
5000 func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
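// Check for divide-by-zero/overflow and panic with the appropriate message.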
5002 cmpZero := s.newValue2(s.ssaOp(ir.ONE, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[2], s.zeroVal(types.Types[types.TUINT64]))
5003 s.check(cmpZero, ir.Syms.Panicdivide)
5004 cmpOverflow := s.newValue2(s.ssaOp(ir.OLT, types.Types[types.TUINT64]), types.Types[types.TBOOL], args[0], args[2])
5005 s.check(cmpOverflow, ir.Syms.Panicoverflow)
5006 return s.newValue3(ssa.OpDiv128u, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
5007 },
5008 sys.AMD64)
5009 alias("math/bits", "Div", "math/bits", "Div64", sys.ArchAMD64)
5010
5011 alias("runtime/internal/sys", "TrailingZeros8", "math/bits", "TrailingZeros8", all...)
5012 alias("runtime/internal/sys", "TrailingZeros32", "math/bits", "TrailingZeros32", all...)
5013 alias("runtime/internal/sys", "TrailingZeros64", "math/bits", "TrailingZeros64", all...)
5014 alias("runtime/internal/sys", "Len8", "math/bits", "Len8", all...)
5015 alias("runtime/internal/sys", "Len64", "math/bits", "Len64", all...)
5016 alias("runtime/internal/sys", "OnesCount64", "math/bits", "OnesCount64", all...)
5017
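/******** sync/atomic ********/

// Note: these are disabled under the race detector; see findIntrinsic below.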
5021 alias("sync/atomic", "LoadInt32", "runtime/internal/atomic", "Load", all...)
5022 alias("sync/atomic", "LoadInt64", "runtime/internal/atomic", "Load64", all...)
5023 alias("sync/atomic", "LoadPointer", "runtime/internal/atomic", "Loadp", all...)
5024 alias("sync/atomic", "LoadUint32", "runtime/internal/atomic", "Load", all...)
5025 alias("sync/atomic", "LoadUint64", "runtime/internal/atomic", "Load64", all...)
5026 alias("sync/atomic", "LoadUintptr", "runtime/internal/atomic", "Load", p4...)
5027 alias("sync/atomic", "LoadUintptr", "runtime/internal/atomic", "Load64", p8...)
5028
5029 alias("sync/atomic", "StoreInt32", "runtime/internal/atomic", "Store", all...)
5030 alias("sync/atomic", "StoreInt64", "runtime/internal/atomic", "Store64", all...)
5031
5032 alias("sync/atomic", "StoreUint32", "runtime/internal/atomic", "Store", all...)
5033 alias("sync/atomic", "StoreUint64", "runtime/internal/atomic", "Store64", all...)
5034 alias("sync/atomic", "StoreUintptr", "runtime/internal/atomic", "Store", p4...)
5035 alias("sync/atomic", "StoreUintptr", "runtime/internal/atomic", "Store64", p8...)
5036
5037 alias("sync/atomic", "SwapInt32", "runtime/internal/atomic", "Xchg", all...)
5038 alias("sync/atomic", "SwapInt64", "runtime/internal/atomic", "Xchg64", all...)
5039 alias("sync/atomic", "SwapUint32", "runtime/internal/atomic", "Xchg", all...)
5040 alias("sync/atomic", "SwapUint64", "runtime/internal/atomic", "Xchg64", all...)
5041 alias("sync/atomic", "SwapUintptr", "runtime/internal/atomic", "Xchg", p4...)
5042 alias("sync/atomic", "SwapUintptr", "runtime/internal/atomic", "Xchg64", p8...)
5043
5044 alias("sync/atomic", "CompareAndSwapInt32", "runtime/internal/atomic", "Cas", all...)
5045 alias("sync/atomic", "CompareAndSwapInt64", "runtime/internal/atomic", "Cas64", all...)
5046 alias("sync/atomic", "CompareAndSwapUint32", "runtime/internal/atomic", "Cas", all...)
5047 alias("sync/atomic", "CompareAndSwapUint64", "runtime/internal/atomic", "Cas64", all...)
5048 alias("sync/atomic", "CompareAndSwapUintptr", "runtime/internal/atomic", "Cas", p4...)
5049 alias("sync/atomic", "CompareAndSwapUintptr", "runtime/internal/atomic", "Cas64", p8...)
5050
5051 alias("sync/atomic", "AddInt32", "runtime/internal/atomic", "Xadd", all...)
5052 alias("sync/atomic", "AddInt64", "runtime/internal/atomic", "Xadd64", all...)
5053 alias("sync/atomic", "AddUint32", "runtime/internal/atomic", "Xadd", all...)
5054 alias("sync/atomic", "AddUint64", "runtime/internal/atomic", "Xadd64", all...)
5055 alias("sync/atomic", "AddUintptr", "runtime/internal/atomic", "Xadd", p4...)
5056 alias("sync/atomic", "AddUintptr", "runtime/internal/atomic", "Xadd64", p8...)
5057
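/******** math/big ********/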
5059 alias("math/big", "mulWW", "math/bits", "Mul64", p8...)
5060 }
5061
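// findIntrinsic returns a function that builds the SSA equivalent of the
// function identified by the symbol sym. If sym is not an intrinsic call,
// it returns nil.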
5064 func findIntrinsic(sym *types.Sym) intrinsicBuilder {
5065 if sym == nil || sym.Pkg == nil {
5066 return nil
5067 }
5068 pkg := sym.Pkg.Path
5069 if sym.Pkg == ir.Pkgs.Runtime {
5070 pkg = "runtime"
5071 }
5072 if base.Flag.Race && pkg == "sync/atomic" {
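// The race detector needs to be able to intercept these calls.
// We can't intrinsify them.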
5075 return nil
5076 }
5077
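// Skip intrinsifying math functions (which may contain hard-float
// instructions) when soft-float.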
5079 if Arch.SoftFloat && pkg == "math" {
5080 return nil
5081 }
5082
5083 fn := sym.Name
5084 if ssa.IntrinsicsDisable {
5085 if pkg == "runtime" && (fn == "getcallerpc" || fn == "getcallersp" || fn == "getclosureptr") {
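// These runtime functions don't have definitions, so they must be intrinsics.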
5087 } else {
5088 return nil
5089 }
5090 }
5091 return intrinsics[intrinsicKey{Arch.LinkArch.Arch, pkg, fn}]
5092 }
5093
5094 func IsIntrinsicCall(n *ir.CallExpr) bool {
5095 if n == nil {
5096 return false
5097 }
5098 name, ok := n.Fun.(*ir.Name)
5099 if !ok {
5100 return false
5101 }
5102 return findIntrinsic(name.Sym()) != nil
5103 }
5104
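// intrinsicCall converts a call to a recognized intrinsic function into the
// intrinsic SSA operation.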
5106 func (s *state) intrinsicCall(n *ir.CallExpr) *ssa.Value {
5107 v := findIntrinsic(n.Fun.Sym())(s, n, s.intrinsicArgs(n))
5108 if ssa.IntrinsicsDebug > 0 {
5109 x := v
5110 if x == nil {
5111 x = s.mem()
5112 }
5113 if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 {
5114 x = x.Args[0]
5115 }
5116 base.WarnfAt(n.Pos(), "intrinsic substitution for %v with %s", n.Fun.Sym().Name, x.LongString())
5117 }
5118 return v
5119 }
5120
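// intrinsicArgs extracts args from n, evaluates them to SSA values, and
// returns them.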
5122 func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
5123 args := make([]*ssa.Value, len(n.Args))
5124 for i, n := range n.Args {
5125 args[i] = s.expr(n)
5126 }
5127 return args
5128 }
5129
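// openDeferRecord adds code to evaluate and store the function for an
// open-coded defer call, and records information about the defer so the
// deferred call can be generated at function exit. The defer call is
// required to have no arguments and no results.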
5136 func (s *state) openDeferRecord(n *ir.CallExpr) {
5137 if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.Fun.Type().NumResults() != 0 {
5138 s.Fatalf("defer call with arguments or results: %v", n)
5139 }
5140
5141 opendefer := &openDeferInfo{
5142 n: n,
5143 }
5144 fn := n.Fun
5145
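// Always store the function value in a stack slot, so the runtime
// panic code can use it. The defer exit code calls the function
// directly if it is a static function.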
5148 closureVal := s.expr(fn)
5149 closure := s.openDeferSave(fn.Type(), closureVal)
5150 opendefer.closureNode = closure.Aux.(*ir.Name)
5151 if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
5152 opendefer.closure = closure
5153 }
5154 index := len(s.openDefers)
5155 s.openDefers = append(s.openDefers, opendefer)
5156
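// Update deferBits only after evaluation and storage to stack of
// the function is successful. Bit i of deferBits records whether
// defer number i was reached.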
5159 bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
5160 newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
5161 s.vars[deferBitsVar] = newDeferBits
5162 s.store(types.Types[types.TUINT8], s.deferBitsAddr, newDeferBits)
5163 }
5164
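// openDeferSave generates SSA nodes to store a value (with type t, which
// must be SSA-able and contain pointers) for an open-coded defer at an
// explicit autotmp location on the stack, so it can be reloaded and used
// at function exit. It returns an SSA value giving the address of the
// autotmp location.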
5170 func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
5171 if !ssa.CanSSA(t) {
5172 s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
5173 }
5174 if !t.HasPointers() {
5175 s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
5176 }
5177 pos := val.Pos
5178 temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
5179 temp.SetOpenDeferSlot(true)
5180 temp.SetFrameOffset(int64(len(s.openDefers)))
5181 var addrTemp *ssa.Value
5182
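// Emit VarDef/VarLive for the slot in the entry block, so the slot is
// considered defined and stays live for the defer exit code.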
5184 if s.curBlock.ID != s.f.Entry.ID {
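// Force the temp storing this defer function to be declared in the
// entry block, so that it will be live for the defer exit code
// (which will actually access it only if the associated defer call
// has been activated).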
5188 if t.HasPointers() {
5189 s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
5190 }
5191 s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
5192 addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
5193 } else {
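// Special case if we're still in the entry block. We can't use
// the above code, since s.defvars[s.f.Entry.ID] isn't defined
// until we end the entry block with s.endBlock().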
5197 if t.HasPointers() {
5198 s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
5199 }
5200 s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
5201 addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
5202 }
5203
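// Since this temp may be used during exit depending on deferBits, it
// must be defined unconditionally on entry. Make sure it is zeroed in
// the entry block, so the GC never follows an uninitialized pointer.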
5208 temp.SetNeedzero(true)
5209
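// Store the closure value into the stack slot.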
5211 s.store(t, addrTemp, val)
5212 return addrTemp
5213 }
5214
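// openDeferExit generates SSA for processing all of the open-coded
// defers at function exit. It tests each bit of deferBits in reverse
// defer order and, for each bit that is set, calls the associated
// deferred function.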
5219 func (s *state) openDeferExit() {
5220 deferExit := s.f.NewBlock(ssa.BlockPlain)
5221 s.endBlock().AddEdgeTo(deferExit)
5222 s.startBlock(deferExit)
5223 s.lastDeferExit = deferExit
5224 s.lastDeferCount = len(s.openDefers)
5225 zeroval := s.constInt8(types.Types[types.TUINT8], 0)
5226
5227 for i := len(s.openDefers) - 1; i >= 0; i-- {
5228 r := s.openDefers[i]
5229 bCond := s.f.NewBlock(ssa.BlockPlain)
5230 bEnd := s.f.NewBlock(ssa.BlockPlain)
5231
5232 deferBits := s.variable(deferBitsVar, types.Types[types.TUINT8])
5233
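// Generate code to check if the bit associated with the current
// defer is set.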
5235 bitval := s.constInt8(types.Types[types.TUINT8], 1<<uint(i))
5236 andval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, bitval)
5237 eqVal := s.newValue2(ssa.OpEq8, types.Types[types.TBOOL], andval, zeroval)
5238 b := s.endBlock()
5239 b.Kind = ssa.BlockIf
5240 b.SetControl(eqVal)
5241 b.AddEdgeTo(bEnd)
5242 b.AddEdgeTo(bCond)
5243 bCond.AddEdgeTo(bEnd)
5244 s.startBlock(bCond)
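// Clear this bit in deferBits and force the store back to the stack,
// so this defer call is not re-run if a later defer panics.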
5248 nbitval := s.newValue1(ssa.OpCom8, types.Types[types.TUINT8], bitval)
5249 maskedval := s.newValue2(ssa.OpAnd8, types.Types[types.TUINT8], deferBits, nbitval)
5250 s.store(types.Types[types.TUINT8], s.deferBitsAddr, maskedval)
5251
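// Use this value for following tests, so we keep previous
// bits cleared.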
5253 s.vars[deferBitsVar] = maskedval
5254
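// Generate the call to the deferred function, using the closure that
// was stored at the point of the defer statement.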
5258 fn := r.n.Fun
5259 stksize := fn.Type().ArgWidth()
5260 var callArgs []*ssa.Value
5261 var call *ssa.Value
5262 if r.closure != nil {
5263 v := s.load(r.closure.Type.Elem(), r.closure)
5264 s.maybeNilCheckClosure(v, callDefer)
5265 codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
5266 aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
5267 call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
5268 } else {
5269 aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil))
5270 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5271 }
5272 callArgs = append(callArgs, s.mem())
5273 call.AddArgs(callArgs...)
5274 call.AuxInt = stksize
5275 s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)
5276
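// Make sure that the stack slots with pointers are kept live through
// the call (which is a preemption point). Also, the first call of the
// last defer exit is used to compute liveness for deferreturn, so all
// stack slots should be live.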
5280 if r.closureNode != nil {
5281 s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
5282 }
5283
5284 s.endBlock()
5285 s.startBlock(bEnd)
5286 }
5287 }
5288
5289 func (s *state) callResult(n *ir.CallExpr, k callKind) *ssa.Value {
5290 return s.call(n, k, false, nil)
5291 }
5292
5293 func (s *state) callAddr(n *ir.CallExpr, k callKind) *ssa.Value {
5294 return s.call(n, k, true, nil)
5295 }
5296
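// call generates SSA for a function call of kind k. If returnResultAddr
// is true, it returns the address of the call's (single) result;
// otherwise it returns the value of the first result, or nil if the
// call has no results or is not a normal call.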
5299 func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool, deferExtra ir.Expr) *ssa.Value {
5300 s.prevCall = nil
5301 var calleeLSym *obj.LSym
5302 var closure *ssa.Value
5303 var codeptr *ssa.Value
5304 var dextra *ssa.Value
5305 var rcvr *ssa.Value
5306 fn := n.Fun
5307 var ACArgs []*types.Type
5308 var ACResults []*types.Type
5309 var callArgs []*ssa.Value
5310
5311 callABI := s.f.ABIDefault
5312
5313 if k != callNormal && k != callTail && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.Fun.Type().NumResults() != 0) {
5314 s.Fatalf("go/defer call with arguments: %v", n)
5315 }
5316
5317 switch n.Op() {
5318 case ir.OCALLFUNC:
5319 if (k == callNormal || k == callTail) && fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC {
5320 fn := fn.(*ir.Name)
5321 calleeLSym = callTargetLSym(fn)
5322 if buildcfg.Experiment.RegabiArgs {
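// This is a static call, so it may be a direct call to a
// non-ABIInternal function. fn.Func may be nil for some
// compiler-generated functions, but those are all ABIInternal.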
5328 if fn.Func != nil {
5329 callABI = abiForFunc(fn.Func, s.f.ABI0, s.f.ABI1)
5330 }
5331 } else {
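// TODO(register args): remove this fallback once the register ABI is working.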
5333 inRegistersImported := fn.Pragma()&ir.RegisterParams != 0
5334 inRegistersSamePackage := fn.Func != nil && fn.Func.Pragma&ir.RegisterParams != 0
5335 if inRegistersImported || inRegistersSamePackage {
5336 callABI = s.f.ABI1
5337 }
5338 }
5339 break
5340 }
5341 closure = s.expr(fn)
5342 if k != callDefer && k != callDeferStack {
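// A deferred nil function needs to panic when the function is
// invoked, not at the point of the defer statement.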
5345 s.maybeNilCheckClosure(closure, k)
5346 }
5347 case ir.OCALLINTER:
5348 if fn.Op() != ir.ODOTINTER {
5349 s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
5350 }
5351 fn := fn.(*ir.SelectorExpr)
5352 var iclosure *ssa.Value
5353 iclosure, rcvr = s.getClosureAndRcvr(fn)
5354 if k == callNormal {
5355 codeptr = s.load(types.Types[types.TUINTPTR], iclosure)
5356 } else {
5357 closure = iclosure
5358 }
5359 }
5360 if deferExtra != nil {
5361 dextra = s.expr(deferExtra)
5362 }
5363
params := callABI.ABIAnalyze(n.Fun.Type(), false /* do not set (register) names from caller side; it can cause races */)
5365 types.CalcSize(fn.Type())
5366 stksize := params.ArgWidth()
5367
5368 res := n.Fun.Type().Results()
5369 if k == callNormal || k == callTail {
5370 for _, p := range params.OutParams() {
5371 ACResults = append(ACResults, p.Type)
5372 }
5373 }
5374
5375 var call *ssa.Value
5376 if k == callDeferStack {
5377 if stksize != 0 {
5378 s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
5379 }
5380
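// Make a _defer struct on the stack.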
5381 t := deferstruct()
5382 _, addr := s.temp(n.Pos(), t)
5383 s.store(closure.Type,
5384 s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(deferStructFnField), addr),
5385 closure)
5386
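// Call runtime.deferprocStack with a pointer to the _defer record.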
5388 ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
5389 aux := ssa.StaticAuxCall(ir.Syms.DeferprocStack, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
5390 callArgs = append(callArgs, addr, s.mem())
5391 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5392 call.AddArgs(callArgs...)
call.AuxInt = int64(types.PtrSize) // deferprocStack takes a *_defer arg
5394 } else {
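// Store arguments to stack, including defer/go arguments and receiver
// for method calls. These are written in SP-offset order.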
5397 argStart := base.Ctxt.Arch.FixedFrameSize
5398
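// Defer/go args.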
5399 if k != callNormal && k != callTail {
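// Write closure (arg to newproc/deferproc).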
5401 ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
5402 callArgs = append(callArgs, closure)
5403 stksize += int64(types.PtrSize)
5404 argStart += int64(types.PtrSize)
5405 if dextra != nil {
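// Extra argument of type any, passed to deferprocat.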
5407 ACArgs = append(ACArgs, types.Types[types.TINTER])
5408 callArgs = append(callArgs, dextra)
5409 stksize += 2 * int64(types.PtrSize)
5410 argStart += 2 * int64(types.PtrSize)
5411 }
5412 }
5413
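// Set receiver (for interface calls).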
5415 if rcvr != nil {
5416 callArgs = append(callArgs, rcvr)
5417 }
5418
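// Write args.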
5420 t := n.Fun.Type()
5421 args := n.Args
5422
for _, p := range params.InParams() { // includes receiver for interface calls
5424 ACArgs = append(ACArgs, p.Type)
5425 }
5426
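// Split the entry block if there are open defers, because later calls
// to openDeferSave may cause a mismatch between the mem for an
// OpDereference and the call site which uses it. See issue #49282.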
5430 if s.curBlock.ID == s.f.Entry.ID && s.hasOpenDefers {
5431 b := s.endBlock()
5432 b.Kind = ssa.BlockPlain
5433 curb := s.f.NewBlock(ssa.BlockPlain)
5434 b.AddEdgeTo(curb)
5435 s.startBlock(curb)
5436 }
5437
5438 for i, n := range args {
5439 callArgs = append(callArgs, s.putArg(n, t.Param(i).Type))
5440 }
5441
5442 callArgs = append(callArgs, s.mem())
5443
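// Call target.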
5445 switch {
5446 case k == callDefer:
5447 sym := ir.Syms.Deferproc
5448 if dextra != nil {
5449 sym = ir.Syms.Deferprocat
5450 }
5451 aux := ssa.StaticAuxCall(sym, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
5452 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5453 case k == callGo:
5454 aux := ssa.StaticAuxCall(ir.Syms.Newproc, s.f.ABIDefault.ABIAnalyzeTypes(ACArgs, ACResults))
5455 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5456 case closure != nil:
// rawLoad because loading the code pointer from a
// closure is always safe, but IsSanitizerSafeAddr
// can't always figure that out currently, and it's
// critical that we not clobber any arguments already
// stored onto the stack.
5462 codeptr = s.rawLoad(types.Types[types.TUINTPTR], closure)
5463 aux := ssa.ClosureAuxCall(callABI.ABIAnalyzeTypes(ACArgs, ACResults))
5464 call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, closure)
5465 case codeptr != nil:
// Interface call; codeptr was loaded from the itab earlier.
5467 aux := ssa.InterfaceAuxCall(params)
5468 call = s.newValue1A(ssa.OpInterLECall, aux.LateExpansionResultType(), aux, codeptr)
5469 case calleeLSym != nil:
5470 aux := ssa.StaticAuxCall(calleeLSym, params)
5471 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5472 if k == callTail {
5473 call.Op = ssa.OpTailLECall
5474 stksize = 0
5475 }
5476 default:
5477 s.Fatalf("bad call type %v %v", n.Op(), n)
5478 }
5479 call.AddArgs(callArgs...)
5480 call.AuxInt = stksize
5481 }
5482 s.prevCall = call
5483 s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
5484
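// Insert VarLive opcodes.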
5485 for _, v := range n.KeepAlive {
5486 if !v.Addrtaken() {
5487 s.Fatalf("KeepAlive variable %v must have Addrtaken set", v)
5488 }
5489 switch v.Class {
5490 case ir.PAUTO, ir.PPARAM, ir.PPARAMOUT:
5491 default:
5492 s.Fatalf("KeepAlive variable %v must be Auto or Arg", v)
5493 }
5494 s.vars[memVar] = s.newValue1A(ssa.OpVarLive, types.TypeMem, v, s.mem())
5495 }
// Finish block for defers.
5498 if k == callDefer || k == callDeferStack {
5499 b := s.endBlock()
5500 b.Kind = ssa.BlockDefer
5501 b.SetControl(call)
5502 bNext := s.f.NewBlock(ssa.BlockPlain)
5503 b.AddEdgeTo(bNext)
5504
5505 r := s.f.NewBlock(ssa.BlockPlain)
5506 s.startBlock(r)
5507 s.exit()
5508 b.AddEdgeTo(r)
5509 b.Likely = ssa.BranchLikely
5510 s.startBlock(bNext)
5511 }
5512
5513 if len(res) == 0 || k != callNormal {
// call has no return value. Continue with the next statement.
5515 return nil
5516 }
5517 fp := res[0]
5518 if returnResultAddr {
5519 return s.resultAddrOfCall(call, 0, fp.Type)
5520 }
5521 return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
5522 }
5523
// maybeNilCheckClosure checks if a nil check of a closure is needed in some
// architecture-dependent situations and, if so, emits the nil check.
5526 func (s *state) maybeNilCheckClosure(closure *ssa.Value, k callKind) {
5527 if Arch.LinkArch.Family == sys.Wasm || buildcfg.GOOS == "aix" && k != callGo {
// On AIX, the closure needs to be verified as fn can be nil, except if it's a go call.
// This needs to be handled by the runtime to have the "go of nil func value" error.
5530 s.nilCheck(closure)
5531 }
5532 }
5533
// getClosureAndRcvr returns values for the appropriate closure and receiver of an
// interface call.
5536 func (s *state) getClosureAndRcvr(fn *ir.SelectorExpr) (*ssa.Value, *ssa.Value) {
5537 i := s.expr(fn.X)
5538 itab := s.newValue1(ssa.OpITab, types.Types[types.TUINTPTR], i)
5539 s.nilCheck(itab)
itabidx := fn.Offset() + 2*int64(types.PtrSize) + 8 // offset of fun field in runtime.itab
5541 closure := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.UintptrPtr, itabidx, itab)
5542 rcvr := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, i)
5543 return closure, rcvr
5544 }
5545
// etypesign returns the signed-ness of e, for integer/pointer etypes.
// -1 means signed, +1 means unsigned, 0 means non-integer/non-pointer.
5548 func etypesign(e types.Kind) int8 {
5549 switch e {
5550 case types.TINT8, types.TINT16, types.TINT32, types.TINT64, types.TINT:
5551 return -1
5552 case types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINT, types.TUINTPTR, types.TUNSAFEPTR:
5553 return +1
5554 }
5555 return 0
5556 }
5557
// addr converts the address of the expression n to SSA, adds it to s and returns the SSA result.
// The value that the returned Value represents is guaranteed to be non-nil.
5560 func (s *state) addr(n ir.Node) *ssa.Value {
5561 if n.Op() != ir.ONAME {
5562 s.pushLine(n.Pos())
5563 defer s.popLine()
5564 }
5565
5566 if s.canSSA(n) {
5567 s.Fatalf("addr of canSSA expression: %+v", n)
5568 }
5569
5570 t := types.NewPtr(n.Type())
5571 linksymOffset := func(lsym *obj.LSym, offset int64) *ssa.Value {
5572 v := s.entryNewValue1A(ssa.OpAddr, t, lsym, s.sb)
5573
5574 if offset != 0 {
5575 v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, offset, v)
5576 }
5577 return v
5578 }
5579 switch n.Op() {
5580 case ir.OLINKSYMOFFSET:
5581 no := n.(*ir.LinksymOffsetExpr)
5582 return linksymOffset(no.Linksym, no.Offset_)
5583 case ir.ONAME:
5584 n := n.(*ir.Name)
5585 if n.Heapaddr != nil {
5586 return s.expr(n.Heapaddr)
5587 }
5588 switch n.Class {
5589 case ir.PEXTERN:
// global variable
5591 return linksymOffset(n.Linksym(), 0)
5592 case ir.PPARAM:
// parameter slot
5594 v := s.decladdrs[n]
5595 if v != nil {
5596 return v
5597 }
5598 s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
5599 return nil
5600 case ir.PAUTO:
5601 return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), !ir.IsAutoTmp(n))
5602
case ir.PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
// ensure that we reuse symbols for out parameters so
// that cse works on their addresses
5606 return s.newValue2Apos(ssa.OpLocalAddr, t, n, s.sp, s.mem(), true)
5607 default:
5608 s.Fatalf("variable address class %v not implemented", n.Class)
5609 return nil
5610 }
5611 case ir.ORESULT:
// load return from callee
5613 n := n.(*ir.ResultExpr)
5614 return s.resultAddrOfCall(s.prevCall, n.Index, n.Type())
5615 case ir.OINDEX:
5616 n := n.(*ir.IndexExpr)
5617 if n.X.Type().IsSlice() {
5618 a := s.expr(n.X)
5619 i := s.expr(n.Index)
5620 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], a)
5621 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
5622 p := s.newValue1(ssa.OpSlicePtr, t, a)
5623 return s.newValue2(ssa.OpPtrIndex, t, p, i)
5624 } else {
5625 a := s.addr(n.X)
5626 i := s.expr(n.Index)
5627 len := s.constInt(types.Types[types.TINT], n.X.Type().NumElem())
5628 i = s.boundsCheck(i, len, ssa.BoundsIndex, n.Bounded())
5629 return s.newValue2(ssa.OpPtrIndex, types.NewPtr(n.X.Type().Elem()), a, i)
5630 }
5631 case ir.ODEREF:
5632 n := n.(*ir.StarExpr)
5633 return s.exprPtr(n.X, n.Bounded(), n.Pos())
5634 case ir.ODOT:
5635 n := n.(*ir.SelectorExpr)
5636 p := s.addr(n.X)
5637 return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
5638 case ir.ODOTPTR:
5639 n := n.(*ir.SelectorExpr)
5640 p := s.exprPtr(n.X, n.Bounded(), n.Pos())
5641 return s.newValue1I(ssa.OpOffPtr, t, n.Offset(), p)
5642 case ir.OCONVNOP:
5643 n := n.(*ir.ConvExpr)
5644 if n.Type() == n.X.Type() {
5645 return s.addr(n.X)
5646 }
5647 addr := s.addr(n.X)
5648 return s.newValue1(ssa.OpCopy, t, addr)
5649 case ir.OCALLFUNC, ir.OCALLINTER:
5650 n := n.(*ir.CallExpr)
5651 return s.callAddr(n, callNormal)
5652 case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
5653 var v *ssa.Value
5654 if n.Op() == ir.ODOTTYPE {
5655 v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
5656 } else {
5657 v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
5658 }
5659 if v.Op != ssa.OpLoad {
5660 s.Fatalf("dottype of non-load")
5661 }
5662 if v.Args[1] != s.mem() {
5663 s.Fatalf("memory no longer live from dottype load")
5664 }
5665 return v.Args[0]
5666 default:
5667 s.Fatalf("unhandled addr %v", n.Op())
5668 return nil
5669 }
5670 }
5671
// canSSA reports whether n is SSA-able.
// n must be an ONAME (or an ODOT sequence of ONAMEs).
5674 func (s *state) canSSA(n ir.Node) bool {
5675 if base.Flag.N != 0 {
5676 return false
5677 }
5678 for {
5679 nn := n
5680 if nn.Op() == ir.ODOT {
5681 nn := nn.(*ir.SelectorExpr)
5682 n = nn.X
5683 continue
5684 }
5685 if nn.Op() == ir.OINDEX {
5686 nn := nn.(*ir.IndexExpr)
5687 if nn.X.Type().IsArray() {
5688 n = nn.X
5689 continue
5690 }
5691 }
5692 break
5693 }
5694 if n.Op() != ir.ONAME {
5695 return false
5696 }
5697 return s.canSSAName(n.(*ir.Name)) && ssa.CanSSA(n.Type())
5698 }
5699
5700 func (s *state) canSSAName(name *ir.Name) bool {
5701 if name.Addrtaken() || !name.OnStack() {
5702 return false
5703 }
5704 switch name.Class {
5705 case ir.PPARAMOUT:
5706 if s.hasdefer {
// TODO: handle this case? Named return values must be
// in memory so that the deferred function can see them.
// Maybe do: if !strings.HasPrefix(n.String(), "~") { return false }
// Or maybe not; even unnamed return values must be written
// back so if a defer recovers, the caller can see them.
5712 return false
5713 }
5714 if s.cgoUnsafeArgs {
// Cgo effectively takes the address of all result args,
// but the compiler can't see that.
5717 return false
5718 }
5719 }
return true
5722 }
5723
// exprPtr evaluates n to a pointer and nil-checks it.
5725 func (s *state) exprPtr(n ir.Node, bounded bool, lineno src.XPos) *ssa.Value {
5726 p := s.expr(n)
5727 if bounded || n.NonNil() {
5728 if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
5729 s.f.Warnl(lineno, "removed nil check")
5730 }
5731 return p
5732 }
5733 p = s.nilCheck(p)
5734 return p
5735 }
5736
// nilCheck generates nil pointer checking code.
// Used only for automatically inserted nil checks,
// not for user code like 'x != nil'.
// Returns a "definitely not nil" copy of x to ensure proper
// ordering of the uses of the post-nilcheck pointer.
5742 func (s *state) nilCheck(ptr *ssa.Value) *ssa.Value {
5743 if base.Debug.DisableNil != 0 || s.curfn.NilCheckDisabled() {
5744 return ptr
5745 }
5746 return s.newValue2(ssa.OpNilCheck, ptr.Type, ptr, s.mem())
5747 }
5748
5749
// boundsCheck generates bounds checking code. Checks if 0 <= idx <[=] len, branches to exit if not.
// Starts a new block on return.
// On input, len must be converted to full int width and be nonnegative.
// Returns idx converted to full int width.
// If bounded is true then caller guarantees the index is not out of bounds
// (but boundsCheck will still extend the index to full int width).
5755 func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
5756 idx = s.extendIndex(idx, len, kind, bounded)
5757
5758 if bounded || base.Flag.B != 0 {
// If bounded or bounds checking is flag-disabled, then no check necessary,
// just return the extended index.
//
// Here, bounded == true if the compiler generated the index itself,
// such as in the expansion of a slice initializer. These indexes are
// compile-time constants. If they are out of range, don't additionally
// save/restore the carry value anywhere.
5779 return idx
5780 }
5781
5782 bNext := s.f.NewBlock(ssa.BlockPlain)
5783 bPanic := s.f.NewBlock(ssa.BlockExit)
5784
5785 if !idx.Type.IsSigned() {
5786 switch kind {
5787 case ssa.BoundsIndex:
5788 kind = ssa.BoundsIndexU
5789 case ssa.BoundsSliceAlen:
5790 kind = ssa.BoundsSliceAlenU
5791 case ssa.BoundsSliceAcap:
5792 kind = ssa.BoundsSliceAcapU
5793 case ssa.BoundsSliceB:
5794 kind = ssa.BoundsSliceBU
5795 case ssa.BoundsSlice3Alen:
5796 kind = ssa.BoundsSlice3AlenU
5797 case ssa.BoundsSlice3Acap:
5798 kind = ssa.BoundsSlice3AcapU
5799 case ssa.BoundsSlice3B:
5800 kind = ssa.BoundsSlice3BU
5801 case ssa.BoundsSlice3C:
5802 kind = ssa.BoundsSlice3CU
5803 }
5804 }
5805
5806 var cmp *ssa.Value
5807 if kind == ssa.BoundsIndex || kind == ssa.BoundsIndexU {
5808 cmp = s.newValue2(ssa.OpIsInBounds, types.Types[types.TBOOL], idx, len)
5809 } else {
5810 cmp = s.newValue2(ssa.OpIsSliceInBounds, types.Types[types.TBOOL], idx, len)
5811 }
5812 b := s.endBlock()
5813 b.Kind = ssa.BlockIf
5814 b.SetControl(cmp)
5815 b.Likely = ssa.BranchLikely
5816 b.AddEdgeTo(bNext)
5817 b.AddEdgeTo(bPanic)
5818
5819 s.startBlock(bPanic)
5820 if Arch.LinkArch.Family == sys.Wasm {
// TODO(khr): figure out how to do "register" based calling convention for bounds checks.
// Should be similar to gcWriteBarrier, but I can't make it work.
5823 s.rtcall(BoundsCheckFunc[kind], false, nil, idx, len)
5824 } else {
5825 mem := s.newValue3I(ssa.OpPanicBounds, types.TypeMem, int64(kind), idx, len, s.mem())
5826 s.endBlock().SetControl(mem)
5827 }
5828 s.startBlock(bNext)
5829
// In Spectre index mode, apply an appropriate mask to avoid speculative out-of-bounds accesses.
5831 if base.Flag.Cfg.SpectreIndex {
5832 op := ssa.OpSpectreIndex
5833 if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU {
5834 op = ssa.OpSpectreSliceIndex
5835 }
5836 idx = s.newValue2(op, types.Types[types.TINT], idx, len)
5837 }
5838
5839 return idx
5840 }
5841
// If cmp (a bool) is false, panic using the given function.
5843 func (s *state) check(cmp *ssa.Value, fn *obj.LSym) {
5844 b := s.endBlock()
5845 b.Kind = ssa.BlockIf
5846 b.SetControl(cmp)
5847 b.Likely = ssa.BranchLikely
5848 bNext := s.f.NewBlock(ssa.BlockPlain)
5849 line := s.peekPos()
5850 pos := base.Ctxt.PosTable.Pos(line)
5851 fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()}
5852 bPanic := s.panics[fl]
5853 if bPanic == nil {
5854 bPanic = s.f.NewBlock(ssa.BlockPlain)
5855 s.panics[fl] = bPanic
5856 s.startBlock(bPanic)
// The panic call takes/returns memory to ensure that the right
// memory state is observed if the panic happens.
5859 s.rtcall(fn, false, nil)
5860 }
5861 b.AddEdgeTo(bNext)
5862 b.AddEdgeTo(bPanic)
5863 s.startBlock(bNext)
5864 }
5865
5866 func (s *state) intDivide(n ir.Node, a, b *ssa.Value) *ssa.Value {
5867 needcheck := true
5868 switch b.Op {
5869 case ssa.OpConst8, ssa.OpConst16, ssa.OpConst32, ssa.OpConst64:
5870 if b.AuxInt != 0 {
5871 needcheck = false
5872 }
5873 }
5874 if needcheck {
5875
5876 cmp := s.newValue2(s.ssaOp(ir.ONE, n.Type()), types.Types[types.TBOOL], b, s.zeroVal(n.Type()))
5877 s.check(cmp, ir.Syms.Panicdivide)
5878 }
5879 return s.newValue2(s.ssaOp(n.Op(), n.Type()), a.Type, a, b)
5880 }
5881
5882
// rtcall issues a call to the given runtime function fn with the listed args.
// Returns a slice of results of the given result types.
// The call is added to the end of the current block.
// If returns is false, the block is marked as an exit block.
5886 func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
5887 s.prevCall = nil
5888
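// Write args to the stack.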
5889 off := base.Ctxt.Arch.FixedFrameSize
5890 var callArgs []*ssa.Value
5891 var callArgTypes []*types.Type
5892
5893 for _, arg := range args {
5894 t := arg.Type
5895 off = types.RoundUp(off, t.Alignment())
5896 size := t.Size()
5897 callArgs = append(callArgs, arg)
5898 callArgTypes = append(callArgTypes, t)
5899 off += size
5900 }
5901 off = types.RoundUp(off, int64(types.RegSize))
5902
5903
5904 var call *ssa.Value
5905 aux := ssa.StaticAuxCall(fn, s.f.ABIDefault.ABIAnalyzeTypes(callArgTypes, results))
5906 callArgs = append(callArgs, s.mem())
5907 call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
5908 call.AddArgs(callArgs...)
5909 s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(results)), call)
5910
5911 if !returns {
5912
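// Finish the current block as an exit block; this call does not return.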
5913 b := s.endBlock()
5914 b.Kind = ssa.BlockExit
5915 b.SetControl(call)
5916 call.AuxInt = off - base.Ctxt.Arch.FixedFrameSize
5917 if len(results) > 0 {
5918 s.Fatalf("panic call can't have results")
5919 }
5920 return nil
5921 }
5922
5923
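// Load results.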
5924 res := make([]*ssa.Value, len(results))
5925 for i, t := range results {
5926 off = types.RoundUp(off, t.Alignment())
5927 res[i] = s.resultOfCall(call, int64(i), t)
5928 off += t.Size()
5929 }
5930 off = types.RoundUp(off, int64(types.PtrSize))
5931
// Remember how much callee stack space we needed.
5933 call.AuxInt = off
5934
5935 return res
5936 }
5937
// do *left = right for type t.
5939 func (s *state) storeType(t *types.Type, left, right *ssa.Value, skip skipMask, leftIsStmt bool) {
5940 s.instrument(t, left, instrumentWrite)
5941
5942 if skip == 0 && (!t.HasPointers() || ssa.IsStackAddr(left)) {
// Known to not have write barrier. Store the whole type.
5944 s.vars[memVar] = s.newValue3Apos(ssa.OpStore, types.TypeMem, t, left, right, s.mem(), leftIsStmt)
5945 return
5946 }
5947
// store scalar fields first, so write barrier stores for
// pointer fields can be grouped together, and scalar values
// don't need to be live across the write barrier call.
// TODO: if the writebarrier pass knows how to reorder stores,
// we can do a single store here as long as skip==0.
5953 s.storeTypeScalars(t, left, right, skip)
5954 if skip&skipPtr == 0 && t.HasPointers() {
5955 s.storeTypePtrs(t, left, right)
5956 }
5957 }
5958
// do *left = right for all scalar (non-pointer) parts of t.
5960 func (s *state) storeTypeScalars(t *types.Type, left, right *ssa.Value, skip skipMask) {
5961 switch {
5962 case t.IsBoolean() || t.IsInteger() || t.IsFloat() || t.IsComplex():
5963 s.store(t, left, right)
5964 case t.IsPtrShaped():
5965 if t.IsPtr() && t.Elem().NotInHeap() {
s.store(t, left, right) // see issue 42032
5967 }
// otherwise, no scalar fields.
5969 case t.IsString():
5970 if skip&skipLen != 0 {
5971 return
5972 }
5973 len := s.newValue1(ssa.OpStringLen, types.Types[types.TINT], right)
5974 lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
5975 s.store(types.Types[types.TINT], lenAddr, len)
5976 case t.IsSlice():
5977 if skip&skipLen == 0 {
5978 len := s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], right)
5979 lenAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, s.config.PtrSize, left)
5980 s.store(types.Types[types.TINT], lenAddr, len)
5981 }
5982 if skip&skipCap == 0 {
5983 cap := s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], right)
5984 capAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.IntPtr, 2*s.config.PtrSize, left)
5985 s.store(types.Types[types.TINT], capAddr, cap)
5986 }
5987 case t.IsInterface():
// itab field doesn't need a write barrier (even though it is a pointer).
5989 itab := s.newValue1(ssa.OpITab, s.f.Config.Types.BytePtr, right)
5990 s.store(types.Types[types.TUINTPTR], left, itab)
5991 case t.IsStruct():
5992 n := t.NumFields()
5993 for i := 0; i < n; i++ {
5994 ft := t.FieldType(i)
5995 addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
5996 val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
5997 s.storeTypeScalars(ft, addr, val, 0)
5998 }
5999 case t.IsArray() && t.NumElem() == 0:
6000
6001 case t.IsArray() && t.NumElem() == 1:
6002 s.storeTypeScalars(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right), 0)
6003 default:
6004 s.Fatalf("bad write barrier type %v", t)
6005 }
6006 }
6007
// do *left = right for all pointer parts of t.
6009 func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
6010 switch {
6011 case t.IsPtrShaped():
6012 if t.IsPtr() && t.Elem().NotInHeap() {
6013 break
6014 }
6015 s.store(t, left, right)
6016 case t.IsString():
6017 ptr := s.newValue1(ssa.OpStringPtr, s.f.Config.Types.BytePtr, right)
6018 s.store(s.f.Config.Types.BytePtr, left, ptr)
6019 case t.IsSlice():
6020 elType := types.NewPtr(t.Elem())
6021 ptr := s.newValue1(ssa.OpSlicePtr, elType, right)
6022 s.store(elType, left, ptr)
6023 case t.IsInterface():
6024
6025 idata := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, right)
6026 idataAddr := s.newValue1I(ssa.OpOffPtr, s.f.Config.Types.BytePtrPtr, s.config.PtrSize, left)
6027 s.store(s.f.Config.Types.BytePtr, idataAddr, idata)
6028 case t.IsStruct():
6029 n := t.NumFields()
6030 for i := 0; i < n; i++ {
6031 ft := t.FieldType(i)
6032 if !ft.HasPointers() {
6033 continue
6034 }
6035 addr := s.newValue1I(ssa.OpOffPtr, ft.PtrTo(), t.FieldOff(i), left)
6036 val := s.newValue1I(ssa.OpStructSelect, ft, int64(i), right)
6037 s.storeTypePtrs(ft, addr, val)
6038 }
6039 case t.IsArray() && t.NumElem() == 0:
6040
6041 case t.IsArray() && t.NumElem() == 1:
6042 s.storeTypePtrs(t.Elem(), left, s.newValue1I(ssa.OpArraySelect, t.Elem(), 0, right))
6043 default:
6044 s.Fatalf("bad write barrier type %v", t)
6045 }
6046 }
6047
// putArg evaluates n for the purpose of passing it as an argument to a function and returns the corresponding value.
6049 func (s *state) putArg(n ir.Node, t *types.Type) *ssa.Value {
6050 var a *ssa.Value
6051 if !ssa.CanSSA(t) {
6052 a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
6053 } else {
6054 a = s.expr(n)
6055 }
6056 return a
6057 }
6058
6059 func (s *state) storeArgWithBase(n ir.Node, t *types.Type, base *ssa.Value, off int64) {
6060 pt := types.NewPtr(t)
6061 var addr *ssa.Value
6062 if base == s.sp {
6063
6064 addr = s.constOffPtrSP(pt, off)
6065 } else {
6066 addr = s.newValue1I(ssa.OpOffPtr, pt, off, base)
6067 }
6068
6069 if !ssa.CanSSA(t) {
6070 a := s.addr(n)
6071 s.move(t, addr, a)
6072 return
6073 }
6074
6075 a := s.expr(n)
6076 s.storeType(t, addr, a, 0, false)
6077 }
6078
// slice computes the slice v[i:j:k] and returns ptr, len, and cap of result.
// i,j,k may be nil, in which case they are set to their default value.
// v may be a slice, string or pointer to an array.
6082 func (s *state) slice(v, i, j, k *ssa.Value, bounded bool) (p, l, c *ssa.Value) {
6083 t := v.Type
6084 var ptr, len, cap *ssa.Value
6085 switch {
6086 case t.IsSlice():
6087 ptr = s.newValue1(ssa.OpSlicePtr, types.NewPtr(t.Elem()), v)
6088 len = s.newValue1(ssa.OpSliceLen, types.Types[types.TINT], v)
6089 cap = s.newValue1(ssa.OpSliceCap, types.Types[types.TINT], v)
6090 case t.IsString():
6091 ptr = s.newValue1(ssa.OpStringPtr, types.NewPtr(types.Types[types.TUINT8]), v)
6092 len = s.newValue1(ssa.OpStringLen, types.Types[types.TINT], v)
6093 cap = len
6094 case t.IsPtr():
6095 if !t.Elem().IsArray() {
6096 s.Fatalf("bad ptr to array in slice %v\n", t)
6097 }
6098 nv := s.nilCheck(v)
6099 ptr = s.newValue1(ssa.OpCopy, types.NewPtr(t.Elem().Elem()), nv)
6100 len = s.constInt(types.Types[types.TINT], t.Elem().NumElem())
6101 cap = len
6102 default:
6103 s.Fatalf("bad type in slice %v\n", t)
6104 }
6105
6106
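// Set default values.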
6107 if i == nil {
6108 i = s.constInt(types.Types[types.TINT], 0)
6109 }
6110 if j == nil {
6111 j = len
6112 }
6113 three := true
6114 if k == nil {
6115 three = false
6116 k = cap
6117 }
6118
// Panic if slice indices are not in bounds.
6122 if three {
6123 if k != cap {
6124 kind := ssa.BoundsSlice3Alen
6125 if t.IsSlice() {
6126 kind = ssa.BoundsSlice3Acap
6127 }
6128 k = s.boundsCheck(k, cap, kind, bounded)
6129 }
6130 if j != k {
6131 j = s.boundsCheck(j, k, ssa.BoundsSlice3B, bounded)
6132 }
6133 i = s.boundsCheck(i, j, ssa.BoundsSlice3C, bounded)
6134 } else {
6135 if j != k {
6136 kind := ssa.BoundsSliceAlen
6137 if t.IsSlice() {
6138 kind = ssa.BoundsSliceAcap
6139 }
6140 j = s.boundsCheck(j, k, kind, bounded)
6141 }
6142 i = s.boundsCheck(i, j, ssa.BoundsSliceB, bounded)
6143 }
6144
6145
6146 subOp := s.ssaOp(ir.OSUB, types.Types[types.TINT])
6147 mulOp := s.ssaOp(ir.OMUL, types.Types[types.TINT])
6148 andOp := s.ssaOp(ir.OAND, types.Types[types.TINT])
6149
// Calculate the length (rlen) and capacity (rcap) of the new slice.
// For strings the capacity of the result is unimportant. However,
// we use rcap to test if we've generated a zero-length slice.
// Use length of strings for that.
6154 rlen := s.newValue2(subOp, types.Types[types.TINT], j, i)
6155 rcap := rlen
6156 if j != k && !t.IsString() {
6157 rcap = s.newValue2(subOp, types.Types[types.TINT], k, i)
6158 }
6159
6160 if (i.Op == ssa.OpConst64 || i.Op == ssa.OpConst32) && i.AuxInt == 0 {
// No pointer arithmetic necessary.
6162 return ptr, rlen, rcap
6163 }
6164
// Calculate the base pointer (rptr) for the new slice.
//
// Generate the following code assuming that indexes are in bounds.
// The masking is to make sure that we don't generate a slice
// that points to the next object in memory. We cannot just set
// the pointer to nil because then we would create a nil slice or
// string.
//
//     rcap = k - i
//     rlen = j - i
//     rptr = ptr + (mask(rcap) & (i * stride))
//
// Where mask(x) is 0 if x==0 and -1 if x>0 and stride is the width
// of the element type.
6179 stride := s.constInt(types.Types[types.TINT], ptr.Type.Elem().Size())
6180
6181
6182 delta := s.newValue2(mulOp, types.Types[types.TINT], i, stride)
6183
// If we're slicing to the point where the capacity is zero,
// zero out the delta.
6186 mask := s.newValue1(ssa.OpSlicemask, types.Types[types.TINT], rcap)
6187 delta = s.newValue2(andOp, types.Types[types.TINT], delta, mask)
6188
6189
6190 rptr := s.newValue2(ssa.OpAddPtr, ptr.Type, ptr, delta)
6191
6192 return rptr, rlen, rcap
6193 }
6194
6195 type u642fcvtTab struct {
6196 leq, cvt2F, and, rsh, or, add ssa.Op
6197 one func(*state, *types.Type, int64) *ssa.Value
6198 }
6199
6200 var u64_f64 = u642fcvtTab{
6201 leq: ssa.OpLeq64,
6202 cvt2F: ssa.OpCvt64to64F,
6203 and: ssa.OpAnd64,
6204 rsh: ssa.OpRsh64Ux64,
6205 or: ssa.OpOr64,
6206 add: ssa.OpAdd64F,
6207 one: (*state).constInt64,
6208 }
6209
6210 var u64_f32 = u642fcvtTab{
6211 leq: ssa.OpLeq64,
6212 cvt2F: ssa.OpCvt64to32F,
6213 and: ssa.OpAnd64,
6214 rsh: ssa.OpRsh64Ux64,
6215 or: ssa.OpOr64,
6216 add: ssa.OpAdd32F,
6217 one: (*state).constInt64,
6218 }
6219
6220 func (s *state) uint64Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6221 return s.uint64Tofloat(&u64_f64, n, x, ft, tt)
6222 }
6223
6224 func (s *state) uint64Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6225 return s.uint64Tofloat(&u64_f32, n, x, ft, tt)
6226 }
6227
6228 func (s *state) uint64Tofloat(cvttab *u642fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
// if x >= 0 {
//    result = (floatY) x
// } else {
//    y = uintX(x) ; y = x & 1
//    z = uintX(x) ; z = z >> 1
//    z = z | y
//    result = floatY(z)
//    result = result + result
// }
//
// What's going on: a large 64-bit "unsigned" value looks like a
// negative number to the hardware's integer-to-float conversion.
// However, because the mantissa is only 63 bits, we don't need
// the LSB, so instead we do an unsigned right shift (divide by
// two), convert, and double. Before that, we need to be sure
// that we do not lose a "1" if it would make the difference in
// the resulting rounding. Therefore, we need to put the LSB back
// after the shift if it was set, as it was going to be lost.
6253 cmp := s.newValue2(cvttab.leq, types.Types[types.TBOOL], s.zeroVal(ft), x)
6254 b := s.endBlock()
6255 b.Kind = ssa.BlockIf
6256 b.SetControl(cmp)
6257 b.Likely = ssa.BranchLikely
6258
6259 bThen := s.f.NewBlock(ssa.BlockPlain)
6260 bElse := s.f.NewBlock(ssa.BlockPlain)
6261 bAfter := s.f.NewBlock(ssa.BlockPlain)
6262
6263 b.AddEdgeTo(bThen)
6264 s.startBlock(bThen)
6265 a0 := s.newValue1(cvttab.cvt2F, tt, x)
6266 s.vars[n] = a0
6267 s.endBlock()
6268 bThen.AddEdgeTo(bAfter)
6269
6270 b.AddEdgeTo(bElse)
6271 s.startBlock(bElse)
6272 one := cvttab.one(s, ft, 1)
6273 y := s.newValue2(cvttab.and, ft, x, one)
6274 z := s.newValue2(cvttab.rsh, ft, x, one)
6275 z = s.newValue2(cvttab.or, ft, z, y)
6276 a := s.newValue1(cvttab.cvt2F, tt, z)
6277 a1 := s.newValue2(cvttab.add, tt, a, a)
6278 s.vars[n] = a1
6279 s.endBlock()
6280 bElse.AddEdgeTo(bAfter)
6281
6282 s.startBlock(bAfter)
6283 return s.variable(n, n.Type())
6284 }
6285
6286 type u322fcvtTab struct {
6287 cvtI2F, cvtF2F ssa.Op
6288 }
6289
6290 var u32_f64 = u322fcvtTab{
6291 cvtI2F: ssa.OpCvt32to64F,
6292 cvtF2F: ssa.OpCopy,
6293 }
6294
6295 var u32_f32 = u322fcvtTab{
6296 cvtI2F: ssa.OpCvt32to32F,
6297 cvtF2F: ssa.OpCvt64Fto32F,
6298 }
6299
6300 func (s *state) uint32Tofloat64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6301 return s.uint32Tofloat(&u32_f64, n, x, ft, tt)
6302 }
6303
6304 func (s *state) uint32Tofloat32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6305 return s.uint32Tofloat(&u32_f32, n, x, ft, tt)
6306 }
6307
6308 func (s *state) uint32Tofloat(cvttab *u322fcvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
// if x >= 0 {
// 	result = floatY(x)
// } else {
// 	result = floatY(float64(x) + (1<<32))
// }
6314 cmp := s.newValue2(ssa.OpLeq32, types.Types[types.TBOOL], s.zeroVal(ft), x)
6315 b := s.endBlock()
6316 b.Kind = ssa.BlockIf
6317 b.SetControl(cmp)
6318 b.Likely = ssa.BranchLikely
6319
6320 bThen := s.f.NewBlock(ssa.BlockPlain)
6321 bElse := s.f.NewBlock(ssa.BlockPlain)
6322 bAfter := s.f.NewBlock(ssa.BlockPlain)
6323
6324 b.AddEdgeTo(bThen)
6325 s.startBlock(bThen)
6326 a0 := s.newValue1(cvttab.cvtI2F, tt, x)
6327 s.vars[n] = a0
6328 s.endBlock()
6329 bThen.AddEdgeTo(bAfter)
6330
6331 b.AddEdgeTo(bElse)
6332 s.startBlock(bElse)
6333 a1 := s.newValue1(ssa.OpCvt32to64F, types.Types[types.TFLOAT64], x)
6334 twoToThe32 := s.constFloat64(types.Types[types.TFLOAT64], float64(1<<32))
6335 a2 := s.newValue2(ssa.OpAdd64F, types.Types[types.TFLOAT64], a1, twoToThe32)
6336 a3 := s.newValue1(cvttab.cvtF2F, tt, a2)
6337
6338 s.vars[n] = a3
6339 s.endBlock()
6340 bElse.AddEdgeTo(bAfter)
6341
6342 s.startBlock(bAfter)
6343 return s.variable(n, n.Type())
6344 }
6345
// referenceTypeBuiltin generates code for the len/cap builtins for maps and channels.
6347 func (s *state) referenceTypeBuiltin(n *ir.UnaryExpr, x *ssa.Value) *ssa.Value {
6348 if !n.X.Type().IsMap() && !n.X.Type().IsChan() {
6349 s.Fatalf("node must be a map or a channel")
6350 }
// if n == nil {
//   return 0
// } else {
//   // len
//   return *((*int)n)
//   // cap
//   return *(((*int)n)+1)
// }
6359 lenType := n.Type()
6360 nilValue := s.constNil(types.Types[types.TUINTPTR])
6361 cmp := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], x, nilValue)
6362 b := s.endBlock()
6363 b.Kind = ssa.BlockIf
6364 b.SetControl(cmp)
6365 b.Likely = ssa.BranchUnlikely
6366
6367 bThen := s.f.NewBlock(ssa.BlockPlain)
6368 bElse := s.f.NewBlock(ssa.BlockPlain)
6369 bAfter := s.f.NewBlock(ssa.BlockPlain)
6370
6371
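// length/capacity of a nil map/chan is zero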
6372 b.AddEdgeTo(bThen)
6373 s.startBlock(bThen)
6374 s.vars[n] = s.zeroVal(lenType)
6375 s.endBlock()
6376 bThen.AddEdgeTo(bAfter)
6377
6378 b.AddEdgeTo(bElse)
6379 s.startBlock(bElse)
6380 switch n.Op() {
6381 case ir.OLEN:
6382
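// length is stored in the first word for map/chan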
6383 s.vars[n] = s.load(lenType, x)
6384 case ir.OCAP:
6385
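// capacity is stored in the second word for chan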
6386 sw := s.newValue1I(ssa.OpOffPtr, lenType.PtrTo(), lenType.Size(), x)
6387 s.vars[n] = s.load(lenType, sw)
6388 default:
6389 s.Fatalf("op must be OLEN or OCAP")
6390 }
6391 s.endBlock()
6392 bElse.AddEdgeTo(bAfter)
6393
6394 s.startBlock(bAfter)
6395 return s.variable(n, lenType)
6396 }
6397
6398 type f2uCvtTab struct {
6399 ltf, cvt2U, subf, or ssa.Op
6400 floatValue func(*state, *types.Type, float64) *ssa.Value
6401 intValue func(*state, *types.Type, int64) *ssa.Value
6402 cutoff uint64
6403 }
6404
6405 var f32_u64 = f2uCvtTab{
6406 ltf: ssa.OpLess32F,
6407 cvt2U: ssa.OpCvt32Fto64,
6408 subf: ssa.OpSub32F,
6409 or: ssa.OpOr64,
6410 floatValue: (*state).constFloat32,
6411 intValue: (*state).constInt64,
6412 cutoff: 1 << 63,
6413 }
6414
6415 var f64_u64 = f2uCvtTab{
6416 ltf: ssa.OpLess64F,
6417 cvt2U: ssa.OpCvt64Fto64,
6418 subf: ssa.OpSub64F,
6419 or: ssa.OpOr64,
6420 floatValue: (*state).constFloat64,
6421 intValue: (*state).constInt64,
6422 cutoff: 1 << 63,
6423 }
6424
6425 var f32_u32 = f2uCvtTab{
6426 ltf: ssa.OpLess32F,
6427 cvt2U: ssa.OpCvt32Fto32,
6428 subf: ssa.OpSub32F,
6429 or: ssa.OpOr32,
6430 floatValue: (*state).constFloat32,
6431 intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
6432 cutoff: 1 << 31,
6433 }
6434
6435 var f64_u32 = f2uCvtTab{
6436 ltf: ssa.OpLess64F,
6437 cvt2U: ssa.OpCvt64Fto32,
6438 subf: ssa.OpSub64F,
6439 or: ssa.OpOr32,
6440 floatValue: (*state).constFloat64,
6441 intValue: func(s *state, t *types.Type, v int64) *ssa.Value { return s.constInt32(t, int32(v)) },
6442 cutoff: 1 << 31,
6443 }
6444
6445 func (s *state) float32ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6446 return s.floatToUint(&f32_u64, n, x, ft, tt)
6447 }
6448 func (s *state) float64ToUint64(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6449 return s.floatToUint(&f64_u64, n, x, ft, tt)
6450 }
6451
6452 func (s *state) float32ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6453 return s.floatToUint(&f32_u32, n, x, ft, tt)
6454 }
6455
6456 func (s *state) float64ToUint32(n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
6457 return s.floatToUint(&f64_u32, n, x, ft, tt)
6458 }
6459
6460 func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *types.Type) *ssa.Value {
// cutoff:=1<<(intY_Size-1)
// if x < floatX(cutoff) {
// 	result = uintY(x)
// } else {
// 	y = x - floatX(cutoff)
// 	z = uintY(y)
// 	result = z | -(cutoff)
// }
6469 cutoff := cvttab.floatValue(s, ft, float64(cvttab.cutoff))
6470 cmp := s.newValue2(cvttab.ltf, types.Types[types.TBOOL], x, cutoff)
6471 b := s.endBlock()
6472 b.Kind = ssa.BlockIf
6473 b.SetControl(cmp)
6474 b.Likely = ssa.BranchLikely
6475
6476 bThen := s.f.NewBlock(ssa.BlockPlain)
6477 bElse := s.f.NewBlock(ssa.BlockPlain)
6478 bAfter := s.f.NewBlock(ssa.BlockPlain)
6479
6480 b.AddEdgeTo(bThen)
6481 s.startBlock(bThen)
6482 a0 := s.newValue1(cvttab.cvt2U, tt, x)
6483 s.vars[n] = a0
6484 s.endBlock()
6485 bThen.AddEdgeTo(bAfter)
6486
6487 b.AddEdgeTo(bElse)
6488 s.startBlock(bElse)
6489 y := s.newValue2(cvttab.subf, ft, x, cutoff)
6490 y = s.newValue1(cvttab.cvt2U, tt, y)
6491 z := cvttab.intValue(s, tt, int64(-cvttab.cutoff))
6492 a1 := s.newValue2(cvttab.or, tt, y, z)
6493 s.vars[n] = a1
6494 s.endBlock()
6495 bElse.AddEdgeTo(bAfter)
6496
6497 s.startBlock(bAfter)
6498 return s.variable(n, n.Type())
6499 }
6500
// dottype generates SSA for a type assertion node.
// commaok indicates whether to panic or return a bool.
// If commaok is false, resok will be nil.
6504 func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
6505 iface := s.expr(n.X)
6506 target := s.reflectType(n.Type())
6507 var targetItab *ssa.Value
6508 if n.ITab != nil {
6509 targetItab = s.expr(n.ITab)
6510 }
6511 return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, nil, target, targetItab, commaok, n.Descriptor)
6512 }
6513
6514 func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
6515 iface := s.expr(n.X)
6516 var source, target, targetItab *ssa.Value
6517 if n.SrcRType != nil {
6518 source = s.expr(n.SrcRType)
6519 }
6520 if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
6521 byteptr := s.f.Config.Types.BytePtr
6522 targetItab = s.expr(n.ITab)
// TODO(mdempsky): Investigate whether compiling n.RType could be
// better than loading itab.typ.
6525 target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), targetItab))
6526 } else {
6527 target = s.expr(n.RType)
6528 }
6529 return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, source, target, targetItab, commaok, nil)
6530 }
6531
// dottype1 implements a x.(T) operation. iface is the argument (x), dst is the type we're asserting to (T)
// and src is the type we're asserting from (type of x).
// source is the *runtime._type of src.
// target is the *runtime._type of dst.
// If src is a nonempty interface and dst is not an interface, targetItab is an itab representing (dst, src). Otherwise it is nil.
// commaok is true if the caller wants a boolean success value. Otherwise, the generated code panics if the conversion fails.
// descriptor is a compiler-allocated type-assert descriptor whose address is passed to the runtime's
// typeAssert call when asserting to a compile-time-known non-empty interface. It may be nil.
6540 func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, target, targetItab *ssa.Value, commaok bool, descriptor *obj.LSym) (res, resok *ssa.Value) {
6541 typs := s.f.Config.Types
6542 byteptr := typs.BytePtr
6543 if dst.IsInterface() {
6544 if dst.IsEmptyInterface() {
// Converting to an empty interface.
// Input could be an empty or nonempty interface.
6547 if base.Debug.TypeAssert > 0 {
6548 base.WarnfAt(pos, "type assertion inlined")
6549 }
// Get itab/type field from input.
6552 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6553
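// Conversion succeeds iff that field is not nil.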
6554 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6555
6556 if src.IsEmptyInterface() && commaok {
6557
6558 return iface, cond
6559 }
6560
6561
6562 b := s.endBlock()
6563 b.Kind = ssa.BlockIf
6564 b.SetControl(cond)
6565 b.Likely = ssa.BranchLikely
6566 bOk := s.f.NewBlock(ssa.BlockPlain)
6567 bFail := s.f.NewBlock(ssa.BlockPlain)
6568 b.AddEdgeTo(bOk)
6569 b.AddEdgeTo(bFail)
6570
6571 if !commaok {
6572
6573 s.startBlock(bFail)
6574 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6575
6576
6577 s.startBlock(bOk)
6578 if src.IsEmptyInterface() {
6579 res = iface
6580 return
6581 }
6582
6583 off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab)
6584 typ := s.load(byteptr, off)
6585 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6586 res = s.newValue2(ssa.OpIMake, dst, typ, idata)
6587 return
6588 }
6589
6590 s.startBlock(bOk)
6591
6592
6593 off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab)
6594 s.vars[typVar] = s.load(byteptr, off)
6595 s.endBlock()
6596
6597
6598 s.startBlock(bFail)
6599 s.vars[typVar] = itab
6600 s.endBlock()
6601
6602
6603 bEnd := s.f.NewBlock(ssa.BlockPlain)
6604 bOk.AddEdgeTo(bEnd)
6605 bFail.AddEdgeTo(bEnd)
6606 s.startBlock(bEnd)
6607 idata := s.newValue1(ssa.OpIData, byteptr, iface)
6608 res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
6609 resok = cond
6610 delete(s.vars, typVar)
6611 return
6612 }
// converting to a nonempty interface needs a runtime call.
6614 if base.Debug.TypeAssert > 0 {
6615 base.WarnfAt(pos, "type assertion not inlined")
6616 }
6617
6618 itab := s.newValue1(ssa.OpITab, byteptr, iface)
6619 data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
6620
6621
6622 bNil := s.f.NewBlock(ssa.BlockPlain)
6623 bNonNil := s.f.NewBlock(ssa.BlockPlain)
6624 bMerge := s.f.NewBlock(ssa.BlockPlain)
6625 cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6626 b := s.endBlock()
6627 b.Kind = ssa.BlockIf
6628 b.SetControl(cond)
6629 b.Likely = ssa.BranchLikely
6630 b.AddEdgeTo(bNonNil)
6631 b.AddEdgeTo(bNil)
6632
6633 s.startBlock(bNil)
6634 if commaok {
6635 s.vars[typVar] = itab
6636 b := s.endBlock()
6637 b.AddEdgeTo(bMerge)
6638 } else {
6639
6640 s.rtcall(ir.Syms.Panicnildottype, false, nil, target)
6641 }
6642
6643
6644 s.startBlock(bNonNil)
6645 typ := itab
6646 if !src.IsEmptyInterface() {
6647 typ = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab))
6648 }
6649
6650
6651 var d *ssa.Value
6652 if descriptor != nil {
6653 d = s.newValue1A(ssa.OpAddr, byteptr, descriptor, s.sb)
6654 if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
// Note: we can only use the cache if we have the right atomic load instruction.
// Double-check that here.
6657 if _, ok := intrinsics[intrinsicKey{Arch.LinkArch.Arch, "runtime/internal/atomic", "Loadp"}]; !ok {
6658 s.Fatalf("atomic load not available")
6659 }
6660
6661 var mul, and, add, zext ssa.Op
6662 if s.config.PtrSize == 4 {
6663 mul = ssa.OpMul32
6664 and = ssa.OpAnd32
6665 add = ssa.OpAdd32
6666 zext = ssa.OpCopy
6667 } else {
6668 mul = ssa.OpMul64
6669 and = ssa.OpAnd64
6670 add = ssa.OpAdd64
6671 zext = ssa.OpZeroExt32to64
6672 }
6673
6674 loopHead := s.f.NewBlock(ssa.BlockPlain)
6675 loopBody := s.f.NewBlock(ssa.BlockPlain)
6676 cacheHit := s.f.NewBlock(ssa.BlockPlain)
6677 cacheMiss := s.f.NewBlock(ssa.BlockPlain)
6678
// Load cache pointer out of descriptor, with an atomic load so
// we ensure that we see a fully written cache.
6681 atomicLoad := s.newValue2(ssa.OpAtomicLoadPtr, types.NewTuple(typs.BytePtr, types.TypeMem), d, s.mem())
6682 cache := s.newValue1(ssa.OpSelect0, typs.BytePtr, atomicLoad)
6683 s.vars[memVar] = s.newValue1(ssa.OpSelect1, types.TypeMem, atomicLoad)
6684
// Load hash from type or itab.
6686 var hash *ssa.Value
6687 if src.IsEmptyInterface() {
6688 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, 2*s.config.PtrSize, typ), s.mem())
6689 } else {
6690 hash = s.newValue2(ssa.OpLoad, typs.UInt32, s.newValue1I(ssa.OpOffPtr, typs.UInt32Ptr, 2*s.config.PtrSize, itab), s.mem())
6691 }
6692 hash = s.newValue1(zext, typs.Uintptr, hash)
6693 s.vars[hashVar] = hash
6694
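// Load mask from cache.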
6695 mask := s.newValue2(ssa.OpLoad, typs.Uintptr, cache, s.mem())
6696
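// Jump to loop head.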
6697 b := s.endBlock()
6698 b.AddEdgeTo(loopHead)
6699
// At loop head, get pointer to the cache entry.
//   e := &cache.Entries[hash&mask]
6702 s.startBlock(loopHead)
6703 idx := s.newValue2(and, typs.Uintptr, s.variable(hashVar, typs.Uintptr), mask)
6704 idx = s.newValue2(mul, typs.Uintptr, idx, s.uintptrConstant(uint64(2*s.config.PtrSize)))
6705 idx = s.newValue2(add, typs.Uintptr, idx, s.uintptrConstant(uint64(s.config.PtrSize)))
6706 e := s.newValue2(ssa.OpAddPtr, typs.UintptrPtr, cache, idx)
6707
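// hash++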
6708 s.vars[hashVar] = s.newValue2(add, typs.Uintptr, s.variable(hashVar, typs.Uintptr), s.uintptrConstant(1))
6709
// Look for a cache hit.
//   if e.Typ == typ { goto hit }
6712 eTyp := s.newValue2(ssa.OpLoad, typs.Uintptr, e, s.mem())
6713 cmp1 := s.newValue2(ssa.OpEqPtr, typs.Bool, typ, eTyp)
6714 b = s.endBlock()
6715 b.Kind = ssa.BlockIf
6716 b.SetControl(cmp1)
6717 b.AddEdgeTo(cacheHit)
6718 b.AddEdgeTo(loopBody)
6719
// Look for an empty entry, the tombstone for this hash table.
//   if e.Typ == nil { goto miss }
6722 s.startBlock(loopBody)
6723 cmp2 := s.newValue2(ssa.OpEqPtr, typs.Bool, eTyp, s.constNil(typs.BytePtr))
6724 b = s.endBlock()
6725 b.Kind = ssa.BlockIf
6726 b.SetControl(cmp2)
6727 b.AddEdgeTo(cacheMiss)
6728 b.AddEdgeTo(loopHead)
6729
// On a hit, load the data fields of the cache entry.
//   Itab = e.Itab
6732 s.startBlock(cacheHit)
6733 eItab := s.newValue2(ssa.OpLoad, typs.BytePtr, s.newValue1I(ssa.OpOffPtr, typs.BytePtrPtr, s.config.PtrSize, e), s.mem())
6734 s.vars[typVar] = eItab
6735 b = s.endBlock()
6736 b.AddEdgeTo(bMerge)
6737
6738
6739 s.startBlock(cacheMiss)
6740 }
6741 }
6742
6743
6744 if descriptor != nil {
6745 itab = s.rtcall(ir.Syms.TypeAssert, true, []*types.Type{byteptr}, d, typ)[0]
6746 } else {
6747 var fn *obj.LSym
6748 if commaok {
6749 fn = ir.Syms.AssertE2I2
6750 } else {
6751 fn = ir.Syms.AssertE2I
6752 }
6753 itab = s.rtcall(fn, true, []*types.Type{byteptr}, target, typ)[0]
6754 }
6755 s.vars[typVar] = itab
6756 b = s.endBlock()
6757 b.AddEdgeTo(bMerge)
6758
6759
6760 s.startBlock(bMerge)
6761 itab = s.variable(typVar, byteptr)
6762 var ok *ssa.Value
6763 if commaok {
6764 ok = s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
6765 }
6766 return s.newValue2(ssa.OpIMake, dst, itab, data), ok
6767 }
6768
6769 if base.Debug.TypeAssert > 0 {
6770 base.WarnfAt(pos, "type assertion inlined")
6771 }
6772
// Converting to a concrete type.
6774 direct := types.IsDirectIface(dst)
itab := s.newValue1(ssa.OpITab, byteptr, iface) // type word of interface
6779 var wantedFirstWord *ssa.Value
6780 if src.IsEmptyInterface() {
// Looking for pointer to target type.
6782 wantedFirstWord = target
6783 } else {
// Looking for pointer to itab for target type and source interface.
6785 wantedFirstWord = targetItab
6786 }
6787
var tmp ir.Node     // temporary for use with large types
var addr *ssa.Value // address of tmp
6790 if commaok && !ssa.CanSSA(dst) {
// unSSAable type, use temporary.
// TODO: get rid of some of these temporaries.
6793 tmp, addr = s.temp(pos, dst)
6794 }
6795
6796 cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
6797 b := s.endBlock()
6798 b.Kind = ssa.BlockIf
6799 b.SetControl(cond)
6800 b.Likely = ssa.BranchLikely
6801
6802 bOk := s.f.NewBlock(ssa.BlockPlain)
6803 bFail := s.f.NewBlock(ssa.BlockPlain)
6804 b.AddEdgeTo(bOk)
6805 b.AddEdgeTo(bFail)
6806
6807 if !commaok {
// on failure, panic by calling panicdottype
6809 s.startBlock(bFail)
6810 taddr := source
6811 if taddr == nil {
6812 taddr = s.reflectType(src)
6813 }
6814 if src.IsEmptyInterface() {
6815 s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
6816 } else {
6817 s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
6818 }
6819
// on success, return data from interface
6821 s.startBlock(bOk)
6822 if direct {
6823 return s.newValue1(ssa.OpIData, dst, iface), nil
6824 }
6825 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6826 return s.load(dst, p), nil
6827 }
6828
// commaok is the more complicated case because we have
// a control flow merge point.
6831 bEnd := s.f.NewBlock(ssa.BlockPlain)
// Note that we need a new valVar each time (unlike okVar where we can
// reuse the variable) because it might have a different type every time.
6834 valVar := ssaMarker("val")
6835
// type assertion succeeded
6837 s.startBlock(bOk)
6838 if tmp == nil {
6839 if direct {
6840 s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
6841 } else {
6842 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6843 s.vars[valVar] = s.load(dst, p)
6844 }
6845 } else {
6846 p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
6847 s.move(dst, addr, p)
6848 }
6849 s.vars[okVar] = s.constBool(true)
6850 s.endBlock()
6851 bOk.AddEdgeTo(bEnd)
6852
// type assertion failed
6854 s.startBlock(bFail)
6855 if tmp == nil {
6856 s.vars[valVar] = s.zeroVal(dst)
6857 } else {
6858 s.zero(dst, addr)
6859 }
6860 s.vars[okVar] = s.constBool(false)
6861 s.endBlock()
6862 bFail.AddEdgeTo(bEnd)
6863
// merge point
6865 s.startBlock(bEnd)
6866 if tmp == nil {
6867 res = s.variable(valVar, dst)
6868 delete(s.vars, valVar)
6869 } else {
6870 res = s.load(dst, addr)
6871 }
6872 resok = s.variable(okVar, types.Types[types.TBOOL])
6873 delete(s.vars, okVar)
6874 return res, resok
6875 }
6876
// temp allocates a temp of type t at position pos
6878 func (s *state) temp(pos src.XPos, t *types.Type) (*ir.Name, *ssa.Value) {
6879 tmp := typecheck.TempAt(pos, s.curfn, t)
6880 if t.HasPointers() {
6881 s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp, s.mem())
6882 }
6883 addr := s.addr(tmp)
6884 return tmp, addr
6885 }
6886
// variable returns the value of a variable at the current location.
6888 func (s *state) variable(n ir.Node, t *types.Type) *ssa.Value {
6889 v := s.vars[n]
6890 if v != nil {
6891 return v
6892 }
6893 v = s.fwdVars[n]
6894 if v != nil {
6895 return v
6896 }
6897
6898 if s.curBlock == s.f.Entry {
// No variable should be live at entry.
6900 s.f.Fatalf("value %v (%v) incorrectly live at entry", n, v)
6901 }
// Make a FwdRef, which records a value that's live on block input.
// We'll find the matching definition as part of insertPhis.
6904 v = s.newValue0A(ssa.OpFwdRef, t, fwdRefAux{N: n})
6905 s.fwdVars[n] = v
6906 if n.Op() == ir.ONAME {
6907 s.addNamedValue(n.(*ir.Name), v)
6908 }
6909 return v
6910 }
6911
6912 func (s *state) mem() *ssa.Value {
6913 return s.variable(memVar, types.TypeMem)
6914 }
6915
6916 func (s *state) addNamedValue(n *ir.Name, v *ssa.Value) {
6917 if n.Class == ir.Pxxx {
6918
6919 return
6920 }
6921 if ir.IsAutoTmp(n) {
6922
6923 return
6924 }
6925 if n.Class == ir.PPARAMOUT {
// Don't track named output values. This prevents return values
// from being assigned too early. TODO: allow this.
6928 return
6929 }
6930 loc := ssa.LocalSlot{N: n, Type: n.Type(), Off: 0}
6931 values, ok := s.f.NamedValues[loc]
6932 if !ok {
6933 s.f.Names = append(s.f.Names, &loc)
6934 s.f.CanonicalLocalSlots[loc] = &loc
6935 }
6936 s.f.NamedValues[loc] = append(values, v)
6937 }
6938
// Branch is an unresolved branch.
6940 type Branch struct {
P *obj.Prog  // branch instruction
B *ssa.Block // target
6943 }
6944
// State contains state needed during Prog generation.
6946 type State struct {
6947 ABI obj.ABI
6948
6949 pp *objw.Progs
6950
// Branches remembers all the branch instructions we've seen
// and where they would like to go.
6953 Branches []Branch
6954
// JumpTables remembers all the jump tables we've seen.
6956 JumpTables []*ssa.Block
6957
// bstart remembers where each block starts (indexed by block ID)
6959 bstart []*obj.Prog
6960
maxarg int64 // largest frame size for arguments to calls made by the function
6962
// Map from GC safe points to liveness index, generated by
// liveness analysis.
6965 livenessMap liveness.Map
6966
// partLiveArgs includes arguments that may be partially live, for which we
// need to generate instructions that spill the argument registers.
6969 partLiveArgs map[*ir.Name]bool
6970
// lineRunStart records the beginning of the current run of instructions
// within a single block sharing the same line number.
// Used to move statement marks to the beginning of such runs.
6974 lineRunStart *obj.Prog
6975
// wasm: The number of values on the WebAssembly stack. This is only used as a safeguard.
6977 OnWasmStackSkipped int
6978 }
6979
6980 func (s *State) FuncInfo() *obj.FuncInfo {
6981 return s.pp.CurFunc.LSym.Func()
6982 }
6983
// Prog appends a new Prog.
6985 func (s *State) Prog(as obj.As) *obj.Prog {
6986 p := s.pp.Prog(as)
6987 if objw.LosesStmtMark(as) {
6988 return p
6989 }
// Float a statement start to the beginning of any same-line run.
// lineRunStart is reset at block boundaries, which appears to work well.
6992 if s.lineRunStart == nil || s.lineRunStart.Pos.Line() != p.Pos.Line() {
6993 s.lineRunStart = p
6994 } else if p.Pos.IsStmt() == src.PosIsStmt {
6995 s.lineRunStart.Pos = s.lineRunStart.Pos.WithIsStmt()
6996 p.Pos = p.Pos.WithNotStmt()
6997 }
6998 return p
6999 }
7000
// Pc returns the current Prog.
7002 func (s *State) Pc() *obj.Prog {
7003 return s.pp.Next
7004 }
7005
// SetPos sets the current source position.
7007 func (s *State) SetPos(pos src.XPos) {
7008 s.pp.Pos = pos
7009 }
7010
// Br emits a single branch instruction and returns the instruction.
// Not all architectures need the returned instruction, but otherwise
// the boilerplate is common to all.
7014 func (s *State) Br(op obj.As, target *ssa.Block) *obj.Prog {
7015 p := s.Prog(op)
7016 p.To.Type = obj.TYPE_BRANCH
7017 s.Branches = append(s.Branches, Branch{P: p, B: target})
7018 return p
7019 }
7020
// DebugFriendlySetPosFrom adjusts Pos.IsStmt subject to heuristics
// that reduce "jumpy" line number churn when debugging.
// Spill/fill/copy instructions from the register allocator,
// phi functions, and instructions with a no-pos position
// are examples of instructions that can cause churn.
7026 func (s *State) DebugFriendlySetPosFrom(v *ssa.Value) {
7027 switch v.Op {
7028 case ssa.OpPhi, ssa.OpCopy, ssa.OpLoadReg, ssa.OpStoreReg:
7029
7030 s.SetPos(v.Pos.WithNotStmt())
7031 default:
7032 p := v.Pos
7033 if p != src.NoXPos {
// If the position is defined, update the position.
// Also convert default IsStmt to NotStmt; only
// explicit statement boundaries should appear
// in the generated code.
7038 if p.IsStmt() != src.PosIsStmt {
7039 if s.pp.Pos.IsStmt() == src.PosIsStmt && s.pp.Pos.SameFileAndLine(p) {
// The current position is already a statement mark on the
// same file and line as p; keep it rather than replacing it
// with a not-statement position, so the statement boundary
// for this line stays where it is.
7053 return
7054 }
7055 p = p.WithNotStmt()
// Calls use the pos attached to v, but copy the statement mark from State
7057 }
7058 s.SetPos(p)
7059 } else {
7060 s.SetPos(s.pp.Pos.WithNotStmt())
7061 }
7062 }
7063 }
7064
// emit argument info (locations on stack) for traceback.
7066 func emitArgInfo(e *ssafn, f *ssa.Func, pp *objw.Progs) {
7067 ft := e.curfn.Type()
7068 if ft.NumRecvs() == 0 && ft.NumParams() == 0 {
7069 return
7070 }
7071
7072 x := EmitArgInfo(e.curfn, f.OwnAux.ABIInfo())
7073 x.Set(obj.AttrContentAddressable, true)
7074 e.curfn.LSym.Func().ArgInfo = x
7075
// Emit a funcdata pointing at the arg info data.
7077 p := pp.Prog(obj.AFUNCDATA)
7078 p.From.SetConst(rtabi.FUNCDATA_ArgInfo)
7079 p.To.Type = obj.TYPE_MEM
7080 p.To.Name = obj.NAME_EXTERN
7081 p.To.Sym = x
7082 }
7083
// EmitArgInfo emits the argument info (locations on stack) of f for traceback.
7085 func EmitArgInfo(f *ir.Func, abiInfo *abi.ABIParamResultInfo) *obj.LSym {
7086 x := base.Ctxt.Lookup(fmt.Sprintf("%s.arginfo%d", f.LSym.Name, f.ABI))
// NOTE: do not set ContentAddressable here. This may be referenced from
// assembly code by name (in this case f is a declaration).
// Instead, set it in emitArgInfo above.
7090
7091 PtrSize := int64(types.PtrSize)
7092 uintptrTyp := types.Types[types.TUINTPTR]
7093
7094 isAggregate := func(t *types.Type) bool {
7095 return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice()
7096 }
7097
// Populate the data.
// The data is a stream of bytes, which contains the offsets and sizes of the
// non-aggregate arguments or non-aggregate fields/elements of aggregate-typed
// arguments, along with special "operators". Specifically,
// - for each non-aggregate arg/field/element, its offset from FP (1 byte) and
//   size (1 byte)
// - special operators:
//   - 0xff - end of sequence
//   - 0xfe - print { (at the given offset)
//   - 0xfd - print } (at the given offset)
//   - 0xfc - print ... (more args/fields/elements)
//   - 0xfb - print _ (offset too large)
7111 const (
7112 _endSeq = 0xff
7113 _startAgg = 0xfe
7114 _endAgg = 0xfd
7115 _dotdotdot = 0xfc
7116 _offsetTooLarge = 0xfb
7117 _special = 0xf0
7118 )
7119
7120 const (
limit    = 10 // print no more than 10 args/components
maxDepth = 5  // no more than 5 layers of nesting
// maxLen is a (conservative) upper bound of the byte stream length:
// each of the limit args/components contributes at most 2 data bytes,
// plus up to 3 operator bytes per nesting level (maxDepth), and the
// stream ends with a single _endSeq byte.
7129 maxLen = (maxDepth*3+2)*limit + 1
7130 )
7131
7132 wOff := 0
7133 n := 0
7134 writebyte := func(o uint8) { wOff = objw.Uint8(x, wOff, o) }
7135
// Write one non-aggregate arg/field/element.
7137 write1 := func(sz, offset int64) {
7138 if offset >= _special {
7139 writebyte(_offsetTooLarge)
7140 } else {
7141 writebyte(uint8(offset))
7142 writebyte(uint8(sz))
7143 }
7144 n++
7145 }
7146
// Visit t recursively and write it out.
// Returns whether to continue visiting.
7149 var visitType func(baseOffset int64, t *types.Type, depth int) bool
7150 visitType = func(baseOffset int64, t *types.Type, depth int) bool {
7151 if n >= limit {
7152 writebyte(_dotdotdot)
7153 return false
7154 }
7155 if !isAggregate(t) {
7156 write1(t.Size(), baseOffset)
7157 return true
7158 }
7159 writebyte(_startAgg)
7160 depth++
7161 if depth >= maxDepth {
7162 writebyte(_dotdotdot)
7163 writebyte(_endAgg)
7164 n++
7165 return true
7166 }
7167 switch {
7168 case t.IsInterface(), t.IsString():
7169 _ = visitType(baseOffset, uintptrTyp, depth) &&
7170 visitType(baseOffset+PtrSize, uintptrTyp, depth)
7171 case t.IsSlice():
7172 _ = visitType(baseOffset, uintptrTyp, depth) &&
7173 visitType(baseOffset+PtrSize, uintptrTyp, depth) &&
7174 visitType(baseOffset+PtrSize*2, uintptrTyp, depth)
7175 case t.IsComplex():
7176 _ = visitType(baseOffset, types.FloatForComplex(t), depth) &&
7177 visitType(baseOffset+t.Size()/2, types.FloatForComplex(t), depth)
7178 case t.IsArray():
7179 if t.NumElem() == 0 {
n++ // {} counts as a component
7181 break
7182 }
7183 for i := int64(0); i < t.NumElem(); i++ {
7184 if !visitType(baseOffset, t.Elem(), depth) {
7185 break
7186 }
7187 baseOffset += t.Elem().Size()
7188 }
7189 case t.IsStruct():
7190 if t.NumFields() == 0 {
n++ // {} counts as a component
7192 break
7193 }
7194 for _, field := range t.Fields() {
7195 if !visitType(baseOffset+field.Offset, field.Type, depth) {
7196 break
7197 }
7198 }
7199 }
7200 writebyte(_endAgg)
7201 return true
7202 }
7203
7204 start := 0
7205 if strings.Contains(f.LSym.Name, "[") {
// Skip the dictionary argument - it is implicit and the user doesn't need to see it.
7207 start = 1
7208 }
7209
7210 for _, a := range abiInfo.InParams()[start:] {
7211 if !visitType(a.FrameOffset(abiInfo), a.Type, 0) {
7212 break
7213 }
7214 }
7215 writebyte(_endSeq)
7216 if wOff > maxLen {
7217 base.Fatalf("ArgInfo too large")
7218 }
7219
7220 return x
7221 }
7222
// emitWrappedFuncInfo emits the info of the wrapped function for a wrapper.
7224 func emitWrappedFuncInfo(e *ssafn, pp *objw.Progs) {
7225 if base.Ctxt.Flag_linkshared {
// Relative reference (SymPtrOff) to another shared object doesn't work.
// Unfortunate.
7228 return
7229 }
7230
7231 wfn := e.curfn.WrappedFunc
7232 if wfn == nil {
7233 return
7234 }
7235
7236 wsym := wfn.Linksym()
7237 x := base.Ctxt.LookupInit(fmt.Sprintf("%s.wrapinfo", wsym.Name), func(x *obj.LSym) {
7238 objw.SymPtrOff(x, 0, wsym)
7239 x.Set(obj.AttrContentAddressable, true)
7240 })
7241 e.curfn.LSym.Func().WrapInfo = x
7242
// Emit a funcdata pointing at the wrap info data.
7244 p := pp.Prog(obj.AFUNCDATA)
7245 p.From.SetConst(rtabi.FUNCDATA_WrapInfo)
7246 p.To.Type = obj.TYPE_MEM
7247 p.To.Name = obj.NAME_EXTERN
7248 p.To.Sym = x
7249 }
7250
// genssa appends entries to pp for each instruction in f.
7252 func genssa(f *ssa.Func, pp *objw.Progs) {
7253 var s State
7254 s.ABI = f.OwnAux.Fn.ABI()
7255
7256 e := f.Frontend().(*ssafn)
7257
7258 s.livenessMap, s.partLiveArgs = liveness.Compute(e.curfn, f, e.stkptrsize, pp)
7259 emitArgInfo(e, f, pp)
7260 argLiveBlockMap, argLiveValueMap := liveness.ArgLiveness(e.curfn, f, pp)
7261
7262 openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo
7263 if openDeferInfo != nil {
// This function uses open-coded defers -- write out the funcdata
// info that we computed at the end of genssa.
7266 p := pp.Prog(obj.AFUNCDATA)
7267 p.From.SetConst(rtabi.FUNCDATA_OpenCodedDeferInfo)
7268 p.To.Type = obj.TYPE_MEM
7269 p.To.Name = obj.NAME_EXTERN
7270 p.To.Sym = openDeferInfo
7271 }
7272
7273 emitWrappedFuncInfo(e, pp)
7274
7275
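// Remember where each block starts.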
7276 s.bstart = make([]*obj.Prog, f.NumBlocks())
7277 s.pp = pp
7278 var progToValue map[*obj.Prog]*ssa.Value
7279 var progToBlock map[*obj.Prog]*ssa.Block
7280 var valueToProgAfter []*obj.Prog
7281 gatherPrintInfo := f.PrintOrHtmlSSA || ssa.GenssaDump[f.Name]
7282 if gatherPrintInfo {
7283 progToValue = make(map[*obj.Prog]*ssa.Value, f.NumValues())
7284 progToBlock = make(map[*obj.Prog]*ssa.Block, f.NumBlocks())
7285 f.Logf("genssa %s\n", f.Name)
7286 progToBlock[s.pp.Next] = f.Blocks[0]
7287 }
7288
7289 if base.Ctxt.Flag_locationlists {
7290 if cap(f.Cache.ValueToProgAfter) < f.NumValues() {
7291 f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues())
7292 }
7293 valueToProgAfter = f.Cache.ValueToProgAfter[:f.NumValues()]
7294 for i := range valueToProgAfter {
7295 valueToProgAfter[i] = nil
7296 }
7297 }
7298
// If the very first instruction is not tagged as a statement,
// debuggers may attribute it to the previous function in the program.
7301 firstPos := src.NoXPos
7302 for _, v := range f.Entry.Values {
7303 if v.Pos.IsStmt() == src.PosIsStmt && v.Op != ssa.OpArg && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7304 firstPos = v.Pos
7305 v.Pos = firstPos.WithDefaultStmt()
7306 break
7307 }
7308 }
7309
// inlMarks has an entry for each Prog that implements an inline mark.
// It maps from that Prog to the global inlining id of the inlined body
// which should unwind to this Prog's location.
7313 var inlMarks map[*obj.Prog]int32
7314 var inlMarkList []*obj.Prog
7315
// inlMarksByPos maps from a (column 1) source position to the set of
// Progs that are in the set above and have that source position.
7318 var inlMarksByPos map[src.XPos][]*obj.Prog
7319
var argLiveIdx int = -1 // argument liveness info index
7321
// Emit basic blocks.
7323 for i, b := range f.Blocks {
7324 s.bstart[b.ID] = s.pp.Next
7325 s.lineRunStart = nil
7326 s.SetPos(s.pp.Pos.WithNotStmt())
7327
7328 if idx, ok := argLiveBlockMap[b.ID]; ok && idx != argLiveIdx {
7329 argLiveIdx = idx
7330 p := s.pp.Prog(obj.APCDATA)
7331 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7332 p.To.SetConst(int64(idx))
7333 }
7334
// Emit values in block.
7336 Arch.SSAMarkMoves(&s, b)
7337 for _, v := range b.Values {
7338 x := s.pp.Next
7339 s.DebugFriendlySetPosFrom(v)
7340
7341 if v.Op.ResultInArg0() && v.ResultReg() != v.Args[0].Reg() {
7342 v.Fatalf("input[0] and output not in same register %s", v.LongString())
7343 }
7344
7345 switch v.Op {
7346 case ssa.OpInitMem:
7347
7348 case ssa.OpArg:
7349
7350 case ssa.OpSP, ssa.OpSB:
7351
7352 case ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult:
7353
7354 case ssa.OpGetG:
// nothing to do when there's a g register,
// and checkLower complains if there's not
7357 case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive, ssa.OpWBend:
7358
7359 case ssa.OpPhi:
7360 CheckLoweredPhi(v)
7361 case ssa.OpConvert:
7362
7363 if v.Args[0].Reg() != v.Reg() {
7364 v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
7365 }
7366 case ssa.OpInlMark:
7367 p := Arch.Ginsnop(s.pp)
7368 if inlMarks == nil {
7369 inlMarks = map[*obj.Prog]int32{}
7370 inlMarksByPos = map[src.XPos][]*obj.Prog{}
7371 }
7372 inlMarks[p] = v.AuxInt32()
7373 inlMarkList = append(inlMarkList, p)
7374 pos := v.Pos.AtColumn1()
7375 inlMarksByPos[pos] = append(inlMarksByPos[pos], p)
7376 firstPos = src.NoXPos
7377
7378 default:
7379
7380 if firstPos != src.NoXPos && v.Op != ssa.OpArgIntReg && v.Op != ssa.OpArgFloatReg && v.Op != ssa.OpLoadReg && v.Op != ssa.OpStoreReg {
7381 s.SetPos(firstPos)
7382 firstPos = src.NoXPos
7383 }
// Attach this safe point to the next
// instruction.
7386 s.pp.NextLive = s.livenessMap.Get(v)
7387 s.pp.NextUnsafe = s.livenessMap.GetUnsafe(v)
7388
7389
7390 Arch.SSAGenValue(&s, v)
7391 }
7392
7393 if idx, ok := argLiveValueMap[v.ID]; ok && idx != argLiveIdx {
7394 argLiveIdx = idx
7395 p := s.pp.Prog(obj.APCDATA)
7396 p.From.SetConst(rtabi.PCDATA_ArgLiveIndex)
7397 p.To.SetConst(int64(idx))
7398 }
7399
7400 if base.Ctxt.Flag_locationlists {
7401 valueToProgAfter[v.ID] = s.pp.Next
7402 }
7403
7404 if gatherPrintInfo {
7405 for ; x != s.pp.Next; x = x.Link {
7406 progToValue[x] = v
7407 }
7408 }
7409 }
7410
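// If this is an empty infinite loop, stick a hardware NOP in there so that debuggers are less confused.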
7411 if s.bstart[b.ID] == s.pp.Next && len(b.Succs) == 1 && b.Succs[0].Block() == b {
7412 p := Arch.Ginsnop(s.pp)
7413 p.Pos = p.Pos.WithIsStmt()
7414 if b.Pos == src.NoXPos {
7415 b.Pos = p.Pos
7416 if b.Pos == src.NoXPos {
7417 b.Pos = pp.Text.Pos
7418 }
7419 }
b.Pos = b.Pos.WithBogusLine() // Debuggers are not good about infinite loops, force a change in line number
7421 }
7422
// Set unsafe mark for any end-of-block generated instructions
// (normally, conditional or unconditional branches).
// This is particularly important for empty blocks, as there
// are no values to inherit the unsafe mark from.
7427 s.pp.NextUnsafe = s.livenessMap.GetUnsafeBlock(b)
7428
// Emit control flow instructions for block.
7430 var next *ssa.Block
7431 if i < len(f.Blocks)-1 && base.Flag.N == 0 {
// If -N, leave next==nil so every block with successors
// ends in a JMP (except call blocks - plive doesn't like
// select{send,recv} followed by a JMP call). Helps keep
// line numbers for otherwise empty blocks.
7436 next = f.Blocks[i+1]
7437 }
7438 x := s.pp.Next
7439 s.SetPos(b.Pos)
7440 Arch.SSAGenBlock(&s, b, next)
7441 if gatherPrintInfo {
7442 for ; x != s.pp.Next; x = x.Link {
7443 progToBlock[x] = b
7444 }
7445 }
7446 }
7447 if f.Blocks[len(f.Blocks)-1].Kind == ssa.BlockExit {
// We need the return address of a panic call to
// still be inside the function in question. So if
// it ends in a call which doesn't return, add a
// nop (which will never execute) after the call.
7452 Arch.Ginsnop(pp)
7453 }
7454 if openDeferInfo != nil {
// When doing open-coded defers, generate a disconnected call to
// deferreturn and a return. This will be used during panic
// recovery to unwind the stack and return back to the runtime.
7458 s.pp.NextLive = s.livenessMap.DeferReturn
7459 p := pp.Prog(obj.ACALL)
7460 p.To.Type = obj.TYPE_MEM
7461 p.To.Name = obj.NAME_EXTERN
7462 p.To.Sym = ir.Syms.Deferreturn
7463
// Load results into registers. So when a deferred function
// recovers a panic, it will return to the caller with the right results.
// The results are already in memory, because they are not SSA'd
// when the function has defers (see canSSAName).
7468 for _, o := range f.OwnAux.ABIInfo().OutParams() {
7469 n := o.Name
7470 rts, offs := o.RegisterTypesAndOffsets()
7471 for i := range o.Registers {
7472 Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
7473 }
7474 }
7475
7476 pp.Prog(obj.ARET)
7477 }
7478
7479 if inlMarks != nil {
7480 hasCall := false
7481
// We have some inline marks. Try to find other instructions we're
// going to emit anyway, and use those instructions instead of the
// inline marks.
7485 for p := pp.Text; p != nil; p = p.Link {
7486 if p.As == obj.ANOP || p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.APCALIGN || Arch.LinkArch.Family == sys.Wasm {
// Don't use 0-sized instructions as inline marks, because we need
// to identify inline mark instructions by pc offset.
// (Some of these instructions are sometimes zero-sized, sometimes not.
// We must not use anything that even might be zero-sized.)
// TODO: are there others?
7492 continue
7493 }
7494 if _, ok := inlMarks[p]; ok {
// Don't use inline marks themselves. We don't know
// whether they will be zero-sized or not yet.
7497 continue
7498 }
7499 if p.As == obj.ACALL || p.As == obj.ADUFFCOPY || p.As == obj.ADUFFZERO {
7500 hasCall = true
7501 }
7502 pos := p.Pos.AtColumn1()
7503 s := inlMarksByPos[pos]
7504 if len(s) == 0 {
7505 continue
7506 }
7507 for _, m := range s {
// We found an instruction with the same source position as
// some of the inline marks.
// Use this instruction instead.
p.Pos = p.Pos.WithIsStmt() // promote position to a statement
7512 pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[m])
// Make the inline mark a real nop, so it doesn't generate any code.
7514 m.As = obj.ANOP
7515 m.Pos = src.NoXPos
7516 m.From = obj.Addr{}
7517 m.To = obj.Addr{}
7518 }
7519 delete(inlMarksByPos, pos)
7520 }
7521
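// Any unmatched inline marks now need to be added to the inlining tree (and will generate a nop instruction).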
7522 for _, p := range inlMarkList {
7523 if p.As != obj.ANOP {
7524 pp.CurFunc.LSym.Func().AddInlMark(p, inlMarks[p])
7525 }
7526 }
7527
7528 if e.stksize == 0 && !hasCall {
// Frameless leaf function: there is no prologue attributed to the
// outer function, so if the first real instruction comes from an
// inlined callee, insert a NOP attributed to the outer function
// at the entry so the function's entry PC is not attributed to
// the inlinee.
7535 for p := pp.Text; p != nil; p = p.Link {
7536 if p.As == obj.AFUNCDATA || p.As == obj.APCDATA || p.As == obj.ATEXT || p.As == obj.ANOP {
7537 continue
7538 }
7539 if base.Ctxt.PosTable.Pos(p.Pos).Base().InliningIndex() >= 0 {
7540
7541 nop := Arch.Ginsnop(pp)
7542 nop.Pos = e.curfn.Pos().WithIsStmt()
// Ginsnop put the nop at the end of the instruction list;
// unlink it from there and splice it in just before p.
7548 for x := pp.Text; x != nil; x = x.Link {
7549 if x.Link == nop {
7550 x.Link = nop.Link
7551 break
7552 }
7553 }
7554
7555 for x := pp.Text; x != nil; x = x.Link {
7556 if x.Link == p {
7557 nop.Link = p
7558 x.Link = nop
7559 break
7560 }
7561 }
7562 }
7563 break
7564 }
7565 }
7566 }
7567
7568 if base.Ctxt.Flag_locationlists {
debugInfo := e.curfn.DebugInfo.(*ssa.FuncDebug)
7571 if e.curfn.ABI == obj.ABIInternal && base.Flag.N != 0 {
7572 ssa.BuildFuncDebugNoOptimized(base.Ctxt, f, base.Debug.LocationLists > 1, StackOffset, debugInfo)
7573 } else {
7574 ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists, StackOffset, debugInfo)
7575 }
7576 bstart := s.bstart
7577 idToIdx := make([]int, f.NumBlocks())
7578 for i, b := range f.Blocks {
7579 idToIdx[b.ID] = i
7580 }
7581
// Note that at this moment, Prog.Pc is a sequence number; it's
// not a real PC until after assembly, so this mapping has to
// be done later.
7584 debugInfo.GetPC = func(b, v ssa.ID) int64 {
7585 switch v {
7586 case ssa.BlockStart.ID:
7587 if b == f.Entry.ID {
return 0 // Start at the very beginning, at the assembler-generated prologue.
7590 }
7591 return bstart[b].Pc
7592 case ssa.BlockEnd.ID:
7593 blk := f.Blocks[idToIdx[b]]
7594 nv := len(blk.Values)
7595 return valueToProgAfter[blk.Values[nv-1].ID].Pc
7596 case ssa.FuncEnd.ID:
7597 return e.curfn.LSym.Size
7598 default:
7599 return valueToProgAfter[v].Pc
7600 }
7601 }
7602 }
7603
7604
7605 for _, br := range s.Branches {
7606 br.P.To.SetTarget(s.bstart[br.B.ID])
7607 if br.P.Pos.IsStmt() != src.PosIsStmt {
7608 br.P.Pos = br.P.Pos.WithNotStmt()
7609 } else if v0 := br.B.FirstPossibleStmtValue(); v0 != nil && v0.Pos.Line() == br.P.Pos.Line() && v0.Pos.IsStmt() == src.PosIsStmt {
7610 br.P.Pos = br.P.Pos.WithNotStmt()
7611 }
7613 }
7614
// Resolve jump table destinations.
7616 for _, jt := range s.JumpTables {
7617
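// Convert from *Block targets to *Prog targets.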
7618 targets := make([]*obj.Prog, len(jt.Succs))
7619 for i, e := range jt.Succs {
7620 targets[i] = s.bstart[e.Block().ID]
7621 }
// Add to list of jump tables to be resolved at assembly time.
// The assembler converts from *Prog entries to absolute addresses
// once it knows instruction byte offsets.
7625 fi := pp.CurFunc.LSym.Func()
7626 fi.JumpTables = append(fi.JumpTables, obj.JumpTable{Sym: jt.Aux.(*obj.LSym), Targets: targets})
7627 }
7628
7629 if e.log {
7630 filename := ""
7631 for p := pp.Text; p != nil; p = p.Link {
7632 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7633 filename = p.InnermostFilename()
7634 f.Logf("# %s\n", filename)
7635 }
7636
7637 var s string
7638 if v, ok := progToValue[p]; ok {
7639 s = v.String()
7640 } else if b, ok := progToBlock[p]; ok {
7641 s = b.String()
7642 } else {
7643 s = " "
7644 }
7645 f.Logf(" %-6s\t%.5d (%s)\t%s\n", s, p.Pc, p.InnermostLineNumber(), p.InstructionString())
7646 }
7647 }
7648 if f.HTMLWriter != nil {
7649 var buf strings.Builder
7650 buf.WriteString("<code>")
7651 buf.WriteString("<dl class=\"ssa-gen\">")
7652 filename := ""
7653 for p := pp.Text; p != nil; p = p.Link {
// Don't spam every line with the file name, which is often huge.
// Only print changes, and "unknown" is not a change.
7656 if p.Pos.IsKnown() && p.InnermostFilename() != filename {
7657 filename = p.InnermostFilename()
7658 buf.WriteString("<dt class=\"ssa-prog-src\"></dt><dd class=\"ssa-prog\">")
7659 buf.WriteString(html.EscapeString("# " + filename))
7660 buf.WriteString("</dd>")
7661 }
7662
7663 buf.WriteString("<dt class=\"ssa-prog-src\">")
7664 if v, ok := progToValue[p]; ok {
7665 buf.WriteString(v.HTML())
7666 } else if b, ok := progToBlock[p]; ok {
7667 buf.WriteString("<b>" + b.HTML() + "</b>")
7668 }
7669 buf.WriteString("</dt>")
7670 buf.WriteString("<dd class=\"ssa-prog\">")
7671 fmt.Fprintf(&buf, "%.5d <span class=\"l%v line-number\">(%s)</span> %s", p.Pc, p.InnermostLineNumber(), p.InnermostLineNumberHTML(), html.EscapeString(p.InstructionString()))
7672 buf.WriteString("</dd>")
7673 }
7674 buf.WriteString("</dl>")
7675 buf.WriteString("</code>")
7676 f.HTMLWriter.WriteColumn("genssa", "genssa", "ssa-prog", buf.String())
7677 }
7678 if ssa.GenssaDump[f.Name] {
7679 fi := f.DumpFileForPhase("genssa")
7680 if fi != nil {
// inliningDiffers reports whether a and b have different inlining
// stacks: any filename differs, or any line number except the
// innermost (last index) differs.
7683 inliningDiffers := func(a, b []src.Pos) bool {
7684 if len(a) != len(b) {
7685 return true
7686 }
7687 for i := range a {
7688 if a[i].Filename() != b[i].Filename() {
7689 return true
7690 }
7691 if i != len(a)-1 && a[i].Line() != b[i].Line() {
7692 return true
7693 }
7694 }
7695 return false
7696 }
7697
7698 var allPosOld []src.Pos
7699 var allPos []src.Pos
7700
7701 for p := pp.Text; p != nil; p = p.Link {
7702 if p.Pos.IsKnown() {
7703 allPos = allPos[:0]
7704 p.Ctxt.AllPos(p.Pos, func(pos src.Pos) { allPos = append(allPos, pos) })
7705 if inliningDiffers(allPos, allPosOld) {
7706 for _, pos := range allPos {
7707 fmt.Fprintf(fi, "# %s:%d\n", pos.Filename(), pos.Line())
7708 }
7709 allPos, allPosOld = allPosOld, allPos
7710 }
7711 }
7712
7713 var s string
7714 if v, ok := progToValue[p]; ok {
7715 s = v.String()
7716 } else if b, ok := progToBlock[p]; ok {
7717 s = b.String()
7718 } else {
7719 s = " "
7720 }
7721 fmt.Fprintf(fi, " %-6s\t%.5d %s\t%s\n", s, p.Pc, ssa.StmtString(p.Pos), p.InstructionString())
7722 }
7723 fi.Close()
7724 }
7725 }
7726
7727 defframe(&s, e, f)
7728
7729 f.HTMLWriter.Close()
7730 f.HTMLWriter = nil
7731 }
7732
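// defframe emits code at the start of the function to spill incoming
// register arguments whose stack slots are only partially live, to zero
// ambiguously live stack variables, and to fill in the argument and
// frame sizes of the function's TEXT instruction.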
func defframe(s *State, e *ssafn, f *ssa.Func) {
	pp := s.pp

	s.maxarg = types.RoundUp(s.maxarg, e.stkalign)
	frame := s.maxarg + e.stksize
	if Arch.PadFrame != nil {
		frame = Arch.PadFrame(frame)
	}

	// Fill in argument and frame size.
	pp.Text.To.Type = obj.TYPE_TEXTSIZE
	pp.Text.To.Val = int32(types.RoundUp(f.OwnAux.ArgWidth(), int64(types.RegSize)))
	pp.Text.To.Offset = frame

	p := pp.Text

	// Insert code to spill argument registers if the named slot may be
	// partially live: liveness considers the whole slot live, but not all
	// of its parts are necessarily spilled. This can only happen with
	// aggregate-typed arguments that are SSA-able and not address-taken
	// (non-SSA-able or address-taken arguments are always spilled up
	// front). Spilling is unnecessary with -N, since then all values are
	// treated as non-SSA-able and spilled up front.
	if f.OwnAux.ABIInfo().InRegistersUsed() != 0 && base.Flag.N == 0 {
		// First, see whether the argument is already spilled before it may
		// be live: look for spills in the entry block up to the first call.
		type nameOff struct {
			n   *ir.Name
			off int64
		}
		partLiveArgsSpilled := make(map[nameOff]bool)
		for _, v := range f.Entry.Values {
			if v.Op.IsCall() {
				break
			}
			if v.Op != ssa.OpStoreReg || v.Args[0].Op != ssa.OpArgIntReg {
				continue
			}
			n, off := ssa.AutoVar(v)
			if n.Class != ir.PPARAM || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] {
				continue
			}
			partLiveArgsSpilled[nameOff{n, off}] = true
		}

		// Then insert code to spill the pointer-carrying parts that are
		// not already spilled.
		for _, a := range f.OwnAux.ABIInfo().InParams() {
			n := a.Name
			if n == nil || n.Addrtaken() || !ssa.CanSSA(n.Type()) || !s.partLiveArgs[n] || len(a.Registers) <= 1 {
				continue
			}
			rts, offs := a.RegisterTypesAndOffsets()
			for i := range a.Registers {
				if !rts[i].HasPointers() {
					continue
				}
				if partLiveArgsSpilled[nameOff{n, offs[i]}] {
					continue // already spilled
				}
				reg := ssa.ObjRegForAbiReg(a.Registers[i], f.Config)
				p = Arch.SpillArgReg(pp, p, f, rts[i], reg, n, offs[i])
			}
		}
	}

	// Insert code to zero ambiguously live variables so that the garbage
	// collector only sees initialized values when it looks for pointers.
	// lo and hi delimit the current frame-relative range of offsets still
	// to be zeroed.
	var lo, hi int64

	// Opaque state for the backend; current backends use it to remember
	// which helper registers have already been set up for zeroing.
	var state uint32

	// Iterate through declarations. Autos are sorted in decreasing
	// frame offset order.
	for _, n := range e.curfn.Dcl {
		if !n.Needzero() {
			continue
		}
		if n.Class != ir.PAUTO {
			e.Fatalf(n.Pos(), "needzero class %d", n.Class)
		}
		if n.Type().Size()%int64(types.PtrSize) != 0 || n.FrameOffset()%int64(types.PtrSize) != 0 || n.Type().Size() == 0 {
			e.Fatalf(n.Pos(), "var %L has size %d offset %d", n, n.Type().Size(), n.Offset_)
		}

		if lo != hi && n.FrameOffset()+n.Type().Size() >= lo-int64(2*types.RegSize) {
			// Merge with range we already have.
			lo = n.FrameOffset()
			continue
		}

		// Zero old range.
		p = Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)

		// Set new range.
		lo = n.FrameOffset()
		hi = lo + n.Type().Size()
	}

	// Zero final range.
	Arch.ZeroRange(pp, p, frame+lo, hi-lo, &state)
}
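// An IndexJump describes one conditional jump used when synthesizing a
// branch from two machine instructions: Jump is the assembler opcode,
// and Index selects which successor of the block the jump targets.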
type IndexJump struct {
	Jump  obj.As
	Index int
}

// oneJump emits a single conditional branch to the chosen successor,
// carrying the block's source position.
func (s *State) oneJump(b *ssa.Block, jump *IndexJump) {
	p := s.Br(jump.Jump, b.Succs[jump.Index].Block())
	p.Pos = b.Pos
}

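// CombJump generates two consecutive jump instructions for a block's
// conditional branch, so that condition codes the machine cannot test
// with a single instruction can still be modeled. When neither successor
// is the fallthrough block, an extra unconditional JMP is emitted.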
func (s *State) CombJump(b, next *ssa.Block, jumps *[2][2]IndexJump) {
	switch next {
	case b.Succs[0].Block():
		s.oneJump(b, &jumps[0][0])
		s.oneJump(b, &jumps[0][1])
	case b.Succs[1].Block():
		s.oneJump(b, &jumps[1][0])
		s.oneJump(b, &jumps[1][1])
	default:
		var q *obj.Prog
		if b.Likely != ssa.BranchUnlikely {
			s.oneJump(b, &jumps[1][0])
			s.oneJump(b, &jumps[1][1])
			q = s.Br(obj.AJMP, b.Succs[1].Block())
		} else {
			s.oneJump(b, &jumps[0][0])
			s.oneJump(b, &jumps[0][1])
			q = s.Br(obj.AJMP, b.Succs[0].Block())
		}
		q.Pos = b.Pos
	}
}

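// AddAux adds the offset in the aux fields (AuxInt and Aux) of v to a.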
func AddAux(a *obj.Addr, v *ssa.Value) {
	AddAux2(a, v, v.AuxInt)
}
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
	if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR {
		v.Fatalf("bad AddAux addr %v", a)
	}
	// add integer offset
	a.Offset += offset

	// If no additional symbol offset, we're done.
	if v.Aux == nil {
		return
	}
	// Add symbol's offset from its base register.
	switch n := v.Aux.(type) {
	case *ssa.AuxCall:
		a.Name = obj.NAME_EXTERN
		a.Sym = n.Fn
	case *obj.LSym:
		a.Name = obj.NAME_EXTERN
		a.Sym = n
	case *ir.Name:
		if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
			a.Name = obj.NAME_PARAM
		} else {
			a.Name = obj.NAME_AUTO
		}
		a.Sym = n.Linksym()
		a.Offset += n.FrameOffset()
	default:
		v.Fatalf("aux in %s not implemented %#v", v, v.Aux)
	}
}

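// extendIndex extends idx to a full int width, emitting code that panics
// with the given bounds-check kind if idx does not fit in an int (which
// can only happen for 64-bit indexes on 32-bit architectures).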
func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value {
	size := idx.Type.Size()
	if size == s.config.PtrSize {
		return idx
	}
	if size > s.config.PtrSize {
		// Truncate 64-bit indexes on 32-bit pointer archs. Test the
		// high word and branch to out-of-bounds failure if it is not 0.
		var lo *ssa.Value
		if idx.Type.IsSigned() {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TINT], idx)
		} else {
			lo = s.newValue1(ssa.OpInt64Lo, types.Types[types.TUINT], idx)
		}
		if bounded || base.Flag.B != 0 {
			return lo
		}
		bNext := s.f.NewBlock(ssa.BlockPlain)
		bPanic := s.f.NewBlock(ssa.BlockExit)
		hi := s.newValue1(ssa.OpInt64Hi, types.Types[types.TUINT32], idx)
		cmp := s.newValue2(ssa.OpEq32, types.Types[types.TBOOL], hi, s.constInt32(types.Types[types.TUINT32], 0))
		if !idx.Type.IsSigned() {
			switch kind {
			case ssa.BoundsIndex:
				kind = ssa.BoundsIndexU
			case ssa.BoundsSliceAlen:
				kind = ssa.BoundsSliceAlenU
			case ssa.BoundsSliceAcap:
				kind = ssa.BoundsSliceAcapU
			case ssa.BoundsSliceB:
				kind = ssa.BoundsSliceBU
			case ssa.BoundsSlice3Alen:
				kind = ssa.BoundsSlice3AlenU
			case ssa.BoundsSlice3Acap:
				kind = ssa.BoundsSlice3AcapU
			case ssa.BoundsSlice3B:
				kind = ssa.BoundsSlice3BU
			case ssa.BoundsSlice3C:
				kind = ssa.BoundsSlice3CU
			}
		}
		b := s.endBlock()
		b.Kind = ssa.BlockIf
		b.SetControl(cmp)
		b.Likely = ssa.BranchLikely
		b.AddEdgeTo(bNext)
		b.AddEdgeTo(bPanic)

		s.startBlock(bPanic)
		mem := s.newValue4I(ssa.OpPanicExtend, types.TypeMem, int64(kind), hi, lo, len, s.mem())
		s.endBlock().SetControl(mem)
		s.startBlock(bNext)

		return lo
	}

	// Extend value to the required size. The switch key encodes the pair
	// (index size, pointer size) as 10*size+PtrSize: for example, a
	// 2-byte index on a 4-byte-pointer target gives 24, i.e. extend
	// 16 bits to 32.
	var op ssa.Op
	if idx.Type.IsSigned() {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpSignExt8to32
		case 18:
			op = ssa.OpSignExt8to64
		case 24:
			op = ssa.OpSignExt16to32
		case 28:
			op = ssa.OpSignExt16to64
		case 48:
			op = ssa.OpSignExt32to64
		default:
			s.Fatalf("bad signed index extension %s", idx.Type)
		}
	} else {
		switch 10*size + s.config.PtrSize {
		case 14:
			op = ssa.OpZeroExt8to32
		case 18:
			op = ssa.OpZeroExt8to64
		case 24:
			op = ssa.OpZeroExt16to32
		case 28:
			op = ssa.OpZeroExt16to64
		case 48:
			op = ssa.OpZeroExt32to64
		default:
			s.Fatalf("bad unsigned index extension %s", idx.Type)
		}
	}
	return s.newValue1(op, types.Types[types.TINT], idx)
}
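// CheckLoweredPhi checks that regalloc and stackalloc correctly handled
// phi values: every argument must live in the same location as the phi
// itself. Called during ssaGenValue.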
func CheckLoweredPhi(v *ssa.Value) {
	if v.Op != ssa.OpPhi {
		v.Fatalf("CheckLoweredPhi called with non-phi value: %v", v.LongString())
	}
	if v.Type.IsMemory() {
		return
	}
	f := v.Block.Func
	loc := f.RegAlloc[v.ID]
	for _, a := range v.Args {
		if aloc := f.RegAlloc[a.ID]; aloc != loc {
			v.Fatalf("phi arg at different location than phi: %v @ %s, but arg %v @ %s\n%s\n", v, loc, a, aloc, v.Block.Func)
		}
	}
}
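// CheckLoweredGetClosurePtr checks that v is the first instruction in
// the function's entry block, except for incoming in-register arguments.
// The output of LoweredGetClosurePtr is typically hardwired to the
// register that holds the closure pointer on closure entry, so nothing
// may clobber that register first.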
func CheckLoweredGetClosurePtr(v *ssa.Value) {
	entry := v.Block.Func.Entry
	if entry != v.Block {
		base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
	}
	for _, w := range entry.Values {
		if w == v {
			break
		}
		switch w.Op {
		case ssa.OpArgIntReg, ssa.OpArgFloatReg:
			// okay
		default:
			base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v)
		}
	}
}
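// CheckArgReg ensures that v is in the function's entry block.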
func CheckArgReg(v *ssa.Value) {
	entry := v.Block.Func.Entry
	if entry != v.Block {
		base.Fatalf("in %s, badly placed ArgIReg or ArgFReg: %v %v", v.Block.Func.Name, v.Block, v)
	}
}

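// AddrAuto fills in a with the address of the auto variable (plus the
// offset within it) associated with v's spill slot, addressed relative
// to the stack pointer.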
func AddrAuto(a *obj.Addr, v *ssa.Value) {
	n, off := ssa.AutoVar(v)
	a.Type = obj.TYPE_MEM
	a.Sym = n.Linksym()
	a.Reg = int16(Arch.REGSP)
	a.Offset = n.FrameOffset() + off
	if n.Class == ir.PPARAM || (n.Class == ir.PPARAMOUT && !n.IsOutputParamInRegisters()) {
		a.Name = obj.NAME_PARAM
	} else {
		a.Name = obj.NAME_AUTO
	}
}

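// Call returns a new CALL instruction for the SSA value v.
// It uses PrepareCall to prepare the call.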
func (s *State) Call(v *ssa.Value) *obj.Prog {
	pPosIsStmt := s.pp.Pos.IsStmt() // statement-ness comes from the current prog, not the call operation
	s.PrepareCall(v)

	p := s.Prog(obj.ACALL)
	if pPosIsStmt == src.PosIsStmt {
		p.Pos = v.Pos.WithIsStmt()
	} else {
		p.Pos = v.Pos.WithNotStmt()
	}
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = sym.Fn
	} else {
		// Indirect call: the target is a register, but whether the
		// operand is encoded as TYPE_REG or TYPE_MEM differs by
		// architecture family.
		switch Arch.LinkArch.Family {
		case sys.AMD64, sys.I386, sys.PPC64, sys.RISCV64, sys.S390X, sys.Wasm:
			p.To.Type = obj.TYPE_REG
		case sys.ARM, sys.ARM64, sys.Loong64, sys.MIPS, sys.MIPS64:
			p.To.Type = obj.TYPE_MEM
		default:
			base.Fatalf("unknown indirect call family")
		}
		p.To.Reg = v.Args[0].Reg()
	}
	return p
}
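// TailCall returns a new tail call instruction for the SSA value v.
// It is like Call, but for a tail call.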
func (s *State) TailCall(v *ssa.Value) *obj.Prog {
	p := s.Call(v)
	p.As = obj.ARET
	return p
}

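// PrepareCall prepares to emit a CALL instruction for v and does call-related bookkeeping.
// It must be called immediately before emitting the actual CALL instruction,
// since it emits PCDATA for the stack map at the call (calls are safe points).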
func (s *State) PrepareCall(v *ssa.Value) {
	idx := s.livenessMap.Get(v)
	if !idx.StackMapValid() {
		// See Liveness.hasStackMap.
		if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
			base.Fatalf("missing stack map index for %v", v.LongString())
		}
	}

	call, ok := v.Aux.(*ssa.AuxCall)

	if ok {
		// Record call graph information for nowritebarrierrec
		// analysis.
		if nowritebarrierrecCheck != nil {
			nowritebarrierrecCheck.recordCall(s.pp.CurFunc, call.Fn, v.Pos)
		}
	}

	if s.maxarg < v.AuxInt {
		s.maxarg = v.AuxInt
	}
}

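// UseArgs records the fact that an instruction needs a certain amount of
// callee args space for its use.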
func (s *State) UseArgs(n int64) {
	if s.maxarg < n {
		s.maxarg = n
	}
}

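// fieldIdx finds the index of the field referred to by the ODOT node n.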
func fieldIdx(n *ir.SelectorExpr) int {
	t := n.X.Type()
	if !t.IsStruct() {
		panic("ODOT's LHS is not a struct")
	}

	for i, f := range t.Fields() {
		if f.Sym == n.Sel {
			if f.Offset != n.Offset() {
				panic("field offset doesn't match")
			}
			return i
		}
	}
	panic(fmt.Sprintf("can't find field in expr %v\n", n))
}

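// ssafn implements the ssa.Frontend interface, carrying the per-function
// state that the SSA backend needs from the compiler frontend.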
type ssafn struct {
	curfn      *ir.Func
	strings    map[string]*obj.LSym // map from constant string to data symbols
	stksize    int64                // stack size for current frame
	stkptrsize int64                // prefix of stack containing pointers

	// alignment for current frame.
	// NOTE: when stkalign > PtrSize, currently this only ensures the offsets of
	// spill slots are aligned, not the size of the frame itself (the size of
	// spill slots and padding added may not be a multiple of stkalign).
	stkalign int64

	log bool // print ssa debug to stdout
}

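// StringData returns a symbol which
// is the data component of a global string constant containing s.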
func (e *ssafn) StringData(s string) *obj.LSym {
	if aux, ok := e.strings[s]; ok {
		return aux
	}
	if e.strings == nil {
		e.strings = make(map[string]*obj.LSym)
	}
	data := staticdata.StringSym(e.curfn.Pos(), s)
	e.strings[s] = data
	return data
}

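// SplitSlot returns a slot representing the data of parent starting at offset.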
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
	node := parent.N

	if node.Class != ir.PAUTO || node.Addrtaken() {
		// addressed things and non-autos retain their parents (i.e., cannot truly be split)
		return ssa.LocalSlot{N: node, Type: t, Off: parent.Off + offset}
	}

	sym := &types.Sym{Name: node.Sym().Name + suffix, Pkg: types.LocalPkg}
	n := e.curfn.NewLocal(parent.N.Pos(), sym, t)
	n.SetUsed(true)
	n.SetEsc(ir.EscNever)
	types.CalcSize(t)
	return ssa.LocalSlot{N: n, Type: t, Off: 0, SplitOf: parent, SplitOffset: offset}
}

func (e *ssafn) Logf(msg string, args ...interface{}) {
	if e.log {
		fmt.Printf(msg, args...)
	}
}

func (e *ssafn) Log() bool {
	return e.log
}

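// Fatalf reports a compiler error and exits.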
func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) {
	base.Pos = pos
	nargs := append([]interface{}{ir.FuncName(e.curfn)}, args...)
	base.Fatalf("'%s': "+msg, nargs...)
}

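// Warnl reports a "warning", which is usually flag-triggered
// logging output for the benefit of tests.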
func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) {
	base.WarnfAt(pos, fmt_, args...)
}

func (e *ssafn) Debug_checknil() bool {
	return base.Debug.Nil != 0
}

func (e *ssafn) UseWriteBarrier() bool {
	return base.Flag.WB
}

func (e *ssafn) Syslook(name string) *obj.LSym {
	switch name {
	case "goschedguarded":
		return ir.Syms.Goschedguarded
	case "writeBarrier":
		return ir.Syms.WriteBarrier
	case "wbZero":
		return ir.Syms.WBZero
	case "wbMove":
		return ir.Syms.WBMove
	case "cgoCheckMemmove":
		return ir.Syms.CgoCheckMemmove
	case "cgoCheckPtrWrite":
		return ir.Syms.CgoCheckPtrWrite
	}
	e.Fatalf(src.NoXPos, "unknown Syslook func %v", name)
	return nil
}

func (e *ssafn) Func() *ir.Func {
	return e.curfn
}

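// clobberBase returns the innermost expression that covers the same
// memory as n: it peels ODOT of single-field structs and OINDEX of
// single-element arrays, since clobbering the base clobbers exactly
// the same bytes.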
func clobberBase(n ir.Node) ir.Node {
	if n.Op() == ir.ODOT {
		n := n.(*ir.SelectorExpr)
		if n.X.Type().NumFields() == 1 {
			return clobberBase(n.X)
		}
	}
	if n.Op() == ir.OINDEX {
		n := n.(*ir.IndexExpr)
		if n.X.Type().IsArray() && n.X.Type().NumElem() == 1 {
			return clobberBase(n.X)
		}
	}
	return n
}

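// callTargetLSym returns the correct entry point symbol for calling callee.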
func callTargetLSym(callee *ir.Name) *obj.LSym {
	if callee.Func == nil {
		// Without an ir.Func we do not know the callee's ABI (this can
		// happen, e.g., for interface method symbols from imported
		// packages), so fall back to the default linker symbol.
		return callee.Linksym()
	}

	return callee.LinksymABI(callee.Func.ABI)
}

func min8(a, b int8) int8 {
	if a < b {
		return a
	}
	return b
}

func max8(a, b int8) int8 {
	if a > b {
		return a
	}
	return b
}

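// deferStructFnField is the field index of _defer.fn.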
const deferStructFnField = 4

var deferType *types.Type

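// deferstruct returns a type interchangeable with runtime._defer.
// Make sure this stays in sync with runtime/runtime2.go:_defer.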
func deferstruct() *types.Type {
	if deferType != nil {
		return deferType
	}

	makefield := func(name string, t *types.Type) *types.Field {
		sym := (*types.Pkg)(nil).Lookup(name)
		return types.NewField(src.NoXPos, sym, t)
	}

	fields := []*types.Field{
		makefield("heap", types.Types[types.TBOOL]),
		makefield("rangefunc", types.Types[types.TBOOL]),
		makefield("sp", types.Types[types.TUINTPTR]),
		makefield("pc", types.Types[types.TUINTPTR]),
		// Note: the types here don't really matter. Defer structures
		// are always scanned explicitly during stack copying and GC,
		// so these fields are declared uintptr even though they hold
		// real pointers.
		makefield("fn", types.Types[types.TUINTPTR]),
		makefield("link", types.Types[types.TUINTPTR]),
		makefield("head", types.Types[types.TUINTPTR]),
	}
	if name := fields[deferStructFnField].Sym.Name; name != "fn" {
		base.Fatalf("deferStructFnField is %q, not fn", name)
	}

	n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.Pkgs.Runtime.Lookup("_defer"))
	typ := types.NewNamed(n)
	n.SetType(typ)
	n.SetTypecheck(1)

	// Build a struct from the fields above and compute its size.
	typ.SetUnderlying(types.NewStruct(fields))
	types.CalcStructSize(typ)

	deferType = typ
	return typ
}
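// SpillSlotAddr uses LocalSlot information to initialize an obj.Addr.
// The resulting address is used in a non-standard context (the function
// prologue, before the frame has been constructed), so the standard
// addressing for parameters would be wrong.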
func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
	return obj.Addr{
		Name:   obj.NAME_NONE,
		Type:   obj.TYPE_MEM,
		Reg:    baseReg,
		Offset: spill.Offset + extraOffset,
	}
}

var (
	BoundsCheckFunc [ssa.BoundsKindCount]*obj.LSym
	ExtendCheckFunc [ssa.BoundsKindCount]*obj.LSym
)