// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
5 package walk
6
7 import (
8 "fmt"
9
10 "cmd/compile/internal/base"
11 "cmd/compile/internal/ir"
12 "cmd/compile/internal/reflectdata"
13 "cmd/compile/internal/ssagen"
14 "cmd/compile/internal/typecheck"
15 "cmd/compile/internal/types"
16 "cmd/internal/src"
17 )
18
19
// tmpstringbufsize is the size in bytes of the temporary stack
// buffers used for small string conversions.
// NOTE(review): presumably must agree with the runtime's temporary
// string buffer size — confirm before changing.
const tmpstringbufsize = 32
21
// Walk lowers the body of fn toward a form the back end can compile:
// it runs the ordering pass, then walks every statement, and finally
// computes sizes for all declared variables. It returns early without
// walking if either pass introduced new type errors.
func Walk(fn *ir.Func) {
	ir.CurFunc = fn
	errorsBefore := base.Errors()
	order(fn)
	if base.Errors() > errorsBefore {
		return
	}

	if base.Flag.W != 0 {
		s := fmt.Sprintf("\nbefore walk %v", ir.CurFunc.Sym())
		ir.DumpList(s, ir.CurFunc.Body)
	}

	// NOTE(review): base.Pos is saved and immediately restored with no
	// intervening code — this looks like a vestige of code that once
	// ran between the two lines; confirm against history before
	// simplifying.
	lno := base.Pos

	base.Pos = lno
	if base.Errors() > errorsBefore {
		return
	}
	walkStmtList(ir.CurFunc.Body)
	if base.Flag.W != 0 {
		s := fmt.Sprintf("after walk %v", ir.CurFunc.Sym())
		ir.DumpList(s, ir.CurFunc.Body)
	}

	// Compute sizes for every declared variable, including any
	// temporaries the walk introduced into fn.Dcl.
	for _, n := range fn.Dcl {
		types.CalcSize(n.Type())
	}
}
52
53
// walkRecv walks a channel-receive expression n used as a statement,
// lowering it to a runtime chanrecv1 call. The nil destination passed
// as the second argument means the received value is discarded
// (NOTE(review): per chanrecv1's convention — confirm against the
// runtime's channel implementation).
func walkRecv(n *ir.UnaryExpr) ir.Node {
	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}
	// Detach n's init statements so they can be re-attached to the
	// lowered call.
	init := ir.TakeInit(n)

	n.X = walkExpr(n.X, &init)
	call := walkExpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, typecheck.NodNil()), &init)
	return ir.InitExpr(init, call)
}
64
// convas ensures the right-hand side of assignment n (which must be
// OAS) is converted to the left-hand side's type, walking any
// inserted conversion with side effects going to init. The node is
// returned with its Typecheck flag set.
func convas(n *ir.AssignStmt, init *ir.Nodes) *ir.AssignStmt {
	if n.Op() != ir.OAS {
		base.Fatalf("convas: not OAS %v", n.Op())
	}
	n.SetTypecheck(1)

	// Degenerate assignments with missing operands need no conversion.
	if n.X == nil || n.Y == nil {
		return n
	}

	lt := n.X.Type()
	rt := n.Y.Type()
	// Likewise if either side has no type (e.g. after earlier errors).
	if lt == nil || rt == nil {
		return n
	}

	// Assignment to blank: no conversion needed, but give the RHS its
	// default type in case it is an untyped constant.
	if ir.IsBlank(n.X) {
		n.Y = typecheck.DefaultLit(n.Y, nil)
		return n
	}

	// Types differ: insert an assignment conversion and walk it.
	if !types.Identical(lt, rt) {
		n.Y = typecheck.AssignConv(n.Y, lt, "assignment")
		n.Y = walkExpr(n.Y, init)
	}
	types.CalcSize(n.Y.Type())

	return n
}
94
// vmkcall constructs a call to function fn with argument list va,
// typechecks it, forces its result type to t (nil when the call has
// no used result), and walks it. Side-effecting setup is appended to
// init, which must be non-nil. The argument count must match fn's
// signature exactly.
func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) *ir.CallExpr {
	if init == nil {
		base.Fatalf("mkcall with nil init: %v", fn)
	}
	if fn.Type() == nil || fn.Type().Kind() != types.TFUNC {
		base.Fatalf("mkcall %v %v", fn, fn.Type())
	}

	n := fn.Type().NumParams()
	if n != len(va) {
		base.Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
	}

	call := typecheck.Call(base.Pos, fn, va, false).(*ir.CallExpr)
	call.SetType(t)
	return walkExpr(call, init).(*ir.CallExpr)
}
112
// mkcall returns a walked call to the runtime function with the given
// name, yielding result type t. Side effects are appended to init.
func mkcall(name string, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(typecheck.LookupRuntime(name), t, init, args)
}
116
// mkcallstmt returns a walked call to the named runtime function for
// use as a statement (no result).
func mkcallstmt(name string, args ...ir.Node) ir.Node {
	return mkcallstmt1(typecheck.LookupRuntime(name), args...)
}
120
// mkcall1 is like mkcall but takes an already-resolved function node
// instead of a runtime function name.
func mkcall1(fn ir.Node, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
	return vmkcall(fn, t, init, args)
}
124
125 func mkcallstmt1(fn ir.Node, args ...ir.Node) ir.Node {
126 var init ir.Nodes
127 n := vmkcall(fn, nil, &init, args)
128 if len(init) == 0 {
129 return n
130 }
131 init.Append(n)
132 return ir.NewBlockStmt(n.Pos(), init)
133 }
134
135 func chanfn(name string, n int, t *types.Type) ir.Node {
136 if !t.IsChan() {
137 base.Fatalf("chanfn %v", t)
138 }
139 switch n {
140 case 1:
141 return typecheck.LookupRuntime(name, t.Elem())
142 case 2:
143 return typecheck.LookupRuntime(name, t.Elem(), t.Elem())
144 }
145 base.Fatalf("chanfn %d", n)
146 return nil
147 }
148
149 func mapfn(name string, t *types.Type, isfat bool) ir.Node {
150 if !t.IsMap() {
151 base.Fatalf("mapfn %v", t)
152 }
153 if mapfast(t) == mapslow || isfat {
154 return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key(), t.Elem())
155 }
156 return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Elem())
157 }
158
159 func mapfndel(name string, t *types.Type) ir.Node {
160 if !t.IsMap() {
161 base.Fatalf("mapfn %v", t)
162 }
163 if mapfast(t) == mapslow {
164 return typecheck.LookupRuntime(name, t.Key(), t.Elem(), t.Key())
165 }
166 return typecheck.LookupRuntime(name, t.Key(), t.Elem())
167 }
168
// Map fast-path classes. mapslow means no specialized runtime routine
// applies; the rest select a suffix of the runtime helper's name.
const (
	mapslow = iota
	mapfast32
	mapfast32ptr
	mapfast64
	mapfast64ptr
	mapfaststr
	nmapfast
)

// mapnames holds one runtime function name per fast-path class.
type mapnames [nmapfast]string

// mkmapnames builds the per-class name table for the runtime map
// entry point base. ptr is the extra suffix appended to the 32- and
// 64-bit variants when pointer keys get their own routines (empty
// otherwise).
func mkmapnames(base string, ptr string) mapnames {
	var names mapnames
	names[mapslow] = base
	names[mapfast32] = base + "_fast32"
	names[mapfast32ptr] = base + "_fast32" + ptr
	names[mapfast64] = base + "_fast64"
	names[mapfast64ptr] = base + "_fast64" + ptr
	names[mapfaststr] = base + "_faststr"
	return names
}

var mapaccess1 = mkmapnames("mapaccess1", "")
var mapaccess2 = mkmapnames("mapaccess2", "")
var mapassign = mkmapnames("mapassign", "ptr")
var mapdelete = mkmapnames("mapdelete", "")
189
// mapfast classifies map type t into one of the mapfast* runtime
// fast-path classes, or mapslow if no specialized routine applies.
func mapfast(t *types.Type) int {
	// Large elements always take the generic routines.
	// NOTE(review): the 128-byte cutoff presumably mirrors the
	// runtime's map element-size limit — confirm against runtime's
	// map implementation before changing.
	if t.Elem().Size() > 128 {
		return mapslow
	}
	switch reflectdata.AlgType(t.Key()) {
	case types.AMEM32:
		if !t.Key().HasPointers() {
			return mapfast32
		}
		if types.PtrSize == 4 {
			return mapfast32ptr
		}
		// A 4-byte key containing pointers on a 64-bit platform
		// should be impossible.
		base.Fatalf("small pointer %v", t.Key())
	case types.AMEM64:
		if !t.Key().HasPointers() {
			return mapfast64
		}
		if types.PtrSize == 8 {
			return mapfast64ptr
		}
		// 8-byte keys with pointers on a 32-bit platform fall through
		// to the slow path (unlike the 4-byte case above, which is
		// fatal).
	case types.ASTRING:
		return mapfaststr
	}
	return mapslow
}
218
// walkAppendArgs walks the arguments of append call n, then forces
// each argument into a cheap expression — one that is safe to
// evaluate more than once — appending any setup to init.
func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
	walkExprListSafe(n.Args, init)

	// walkExprListSafe alone is not enough: the lowered form of append
	// references its operands multiple times, so each must also be
	// cheap. NOTE(review): inferred from the use of cheapExpr here —
	// confirm against the append lowering.
	ls := n.Args
	for i1, n1 := range ls {
		ls[i1] = cheapExpr(n1, init)
	}
}
230
231
// appendWalkStmt typechecks and walks stmt, then appends the result
// to init.
func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
	op := stmt.Op()
	n := typecheck.Stmt(stmt)
	if op == ir.OAS || op == ir.OAS2 {
		// Walk assignments as expressions so that any side effects are
		// appended directly onto init, rather than wrapped in a block
		// as walkStmt would do. NOTE(review): rationale inferred from
		// the walkExpr/walkStmt split — confirm.
		n = walkExpr(n, init)
	} else {
		n = walkStmt(n)
	}
	init.Append(n)
}
246
247
248
// maxOpenDefers is the maximum number of defers in a function for
// which the open-coded defer implementation may be used.
// NOTE(review): meaning inferred from the name; the uses are outside
// this chunk — confirm against the defer lowering code.
const maxOpenDefers = 8
250
251
252
253 func backingArrayPtrLen(n ir.Node) (ptr, length ir.Node) {
254 var init ir.Nodes
255 c := cheapExpr(n, &init)
256 if c != n || len(init) != 0 {
257 base.Fatalf("backingArrayPtrLen not cheap: %v", n)
258 }
259 ptr = ir.NewUnaryExpr(base.Pos, ir.OSPTR, n)
260 if n.Type().IsString() {
261 ptr.SetType(types.Types[types.TUINT8].PtrTo())
262 } else {
263 ptr.SetType(n.Type().Elem().PtrTo())
264 }
265 ptr.SetTypecheck(1)
266 length = ir.NewUnaryExpr(base.Pos, ir.OLEN, n)
267 length.SetType(types.Types[types.TINT])
268 length.SetTypecheck(1)
269 return ptr, length
270 }
271
272
273
274
// mayCall reports whether evaluating expression n may require a
// function call. NOTE(review): callers presumably use this to decide
// whether n must be evaluated and saved before marshaling another
// call's arguments — confirm at the call sites.
func mayCall(n ir.Node) bool {
	// Under instrumentation, any expression may be rewritten to call
	// into the instrumentation runtime.
	if base.Flag.Cfg.Instrumenting {
		return true
	}

	isSoftFloat := func(typ *types.Type) bool {
		return types.IsFloat[typ.Kind()] || types.IsComplex[typ.Kind()]
	}

	return ir.Any(n, func(n ir.Node) bool {
		// By this point, walked expressions should carry no init
		// lists; treat any remaining one as an internal error.
		if len(n.Init()) != 0 {
			base.FatalfAt(n.Pos(), "mayCall %+v", n)
		}

		switch n.Op() {
		default:
			// Unknown op: fail loudly rather than guess.
			base.FatalfAt(n.Pos(), "mayCall %+v", n)

		// Direct calls, and unsafe builtins that may lower to calls.
		case ir.OCALLFUNC, ir.OCALLINTER,
			ir.OUNSAFEADD, ir.OUNSAFESLICE:
			return true

		case ir.OINDEX, ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR,
			ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODYNAMICDOTTYPE, ir.ODIV, ir.OMOD,
			ir.OSLICE2ARR, ir.OSLICE2ARRPTR:
			// These ops can panic (bounds check, nil dereference,
			// division by zero, failed type assertion), and a panic
			// involves a runtime call.
			return true

		case ir.OANDAND, ir.OOROR:
			n := n.(*ir.LogicalExpr)
			// Conservatively treat short-circuit operators as
			// call-like when the right operand carries init
			// statements, since those must only run conditionally.
			return len(n.Y.Init()) != 0

		// Arithmetic and comparisons become runtime calls when the
		// target uses soft-float.
		case ir.OADD, ir.OSUB, ir.OMUL, ir.ONEG:
			return ssagen.Arch.SoftFloat && isSoftFloat(n.Type())
		case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
			n := n.(*ir.BinaryExpr)
			return ssagen.Arch.SoftFloat && isSoftFloat(n.X.Type())
		case ir.OCONV:
			n := n.(*ir.ConvExpr)
			return ssagen.Arch.SoftFloat && (isSoftFloat(n.Type()) || isSoftFloat(n.X.Type()))

		case ir.OMIN, ir.OMAX:
			// min/max over strings or floats may call the runtime
			// (NOTE(review): presumably for string comparison and
			// float NaN/sign handling — confirm).
			return n.Type().IsString() || n.Type().IsFloat()

		// Ops known to never require a call.
		case ir.OLITERAL, ir.ONIL, ir.ONAME, ir.OLINKSYMOFFSET, ir.OMETHEXPR,
			ir.OAND, ir.OANDNOT, ir.OLSH, ir.OOR, ir.ORSH, ir.OXOR, ir.OCOMPLEX, ir.OMAKEFACE,
			ir.OADDR, ir.OBITNOT, ir.ONOT, ir.OPLUS,
			ir.OCAP, ir.OIMAG, ir.OLEN, ir.OREAL,
			ir.OCONVNOP, ir.ODOT,
			ir.OCFUNC, ir.OIDATA, ir.OITAB, ir.OSPTR,
			ir.OBYTES2STRTMP, ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP, ir.OSLICEHEADER, ir.OSTRINGHEADER:
		}

		return false
	})
}
344
345
// itabType returns an expression loading the _type field of the
// runtime itab struct pointed to by itab, via a bounded selector
// (no nil check).
func itabType(itab ir.Node) ir.Node {
	if itabTypeField == nil {
		// Lazily synthesize the field descriptor: a *uint8 field named
		// "_type" at offset PtrSize. NOTE(review): assumes _type
		// follows exactly one pointer-sized field in the runtime's
		// itab layout — confirm against the runtime definition.
		itabTypeField = runtimeField("_type", int64(types.PtrSize), types.NewPtr(types.Types[types.TUINT8]))
	}
	return boundedDotPtr(base.Pos, itab, itabTypeField)
}

// itabTypeField caches the synthesized itab._type field descriptor;
// populated on first use by itabType.
var itabTypeField *types.Field
355
356
357
// boundedDotPtr builds a typechecked ODOTPTR selector ptr.field
// marked Bounded. NOTE(review): Bounded on a selector presumably
// suppresses the implicit nil check, so callers must guarantee ptr is
// non-nil — confirm against the ODOTPTR lowering.
func boundedDotPtr(pos src.XPos, ptr ir.Node, field *types.Field) *ir.SelectorExpr {
	sel := ir.NewSelectorExpr(pos, ir.ODOTPTR, ptr, field.Sym)
	sel.Selection = field
	sel.SetType(field.Type)
	sel.SetTypecheck(1)
	sel.SetBounded(true)
	return sel
}
366
// runtimeField synthesizes a field descriptor (name in the runtime
// package, byte offset, type) for building direct accesses to fields
// of runtime-internal structs, such as itab._type.
func runtimeField(name string, offset int64, typ *types.Type) *types.Field {
	f := types.NewField(src.NoXPos, ir.Pkgs.Runtime.Lookup(name), typ)
	f.Offset = offset
	return f
}
372
373
374
375
376 func ifaceData(pos src.XPos, n ir.Node, t *types.Type) ir.Node {
377 if t.IsInterface() {
378 base.Fatalf("ifaceData interface: %v", t)
379 }
380 ptr := ir.NewUnaryExpr(pos, ir.OIDATA, n)
381 if types.IsDirectIface(t) {
382 ptr.SetType(t)
383 ptr.SetTypecheck(1)
384 return ptr
385 }
386 ptr.SetType(types.NewPtr(t))
387 ptr.SetTypecheck(1)
388 ind := ir.NewStarExpr(pos, ptr)
389 ind.SetType(t)
390 ind.SetTypecheck(1)
391 ind.SetBounded(true)
392 return ind
393 }
394