// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package walk

import (
	"fmt"
	"go/constant"
	"go/token"
	"strings"

	"cmd/compile/internal/base"
	"cmd/compile/internal/escape"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
)

// The result of walkAppend MUST be assigned back to n, e.g.
//
//	n.Left = walkAppend(n.Left, init)
//
// For the instrumented build (race detector etc.), append(src, a [, b]*)
// is expanded to roughly
//
//	init {
//		s := src
//		const argc = len(args) - 1
//		newLen := len(s) + argc
//		if uint(newLen) <= uint(cap(s)) {
//			s = s[:newLen]
//		} else {
//			s = growslice(s.ptr, newLen, cap(s), argc, elemType)
//		}
//		s[newLen-argc] = a
//		s[newLen-argc+1] = b
//		...
//	}
//	s
//
// Otherwise the OAPPEND node is left for the SSA backend to expand.
func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
	if !ir.SameSafeExpr(dst, n.Args[0]) {
		n.Args[0] = safeExpr(n.Args[0], init)
		n.Args[0] = walkExpr(n.Args[0], init)
	}
	walkExprListSafe(n.Args[1:], init)

	nsrc := n.Args[0]

	// walkExprListSafe will leave OINDEX (s[n]) alone if both s
	// and n are names or literals, but those may index the slice we're
	// modifying here. Fix explicitly.
	// Using cheapExpr also makes sure that the evaluation
	// of all function calls and eval'd values are done
	// before we knock out append's first argument.
	ls := n.Args[1:]
	for i, n := range ls {
		n = cheapExpr(n, init)
		if !types.Identical(n.Type(), nsrc.Type().Elem()) {
			n = typecheck.AssignConv(n, nsrc.Type().Elem(), "append")
			n = walkExpr(n, init)
		}
		ls[i] = n
	}

	argc := len(n.Args) - 1
	if argc < 1 {
		return nsrc
	}

	// General case, with no function calls left as arguments.
	// Leave for ssagen, except that instrumentation requires the old form.
	if !base.Flag.Cfg.Instrumenting || base.Flag.CompilingRuntime {
		return n
	}

	var l []ir.Node

	// s := src
	s := typecheck.TempAt(base.Pos, ir.CurFunc, nsrc.Type())
	l = append(l, ir.NewAssignStmt(base.Pos, s, nsrc))

	// num = number of elements being appended.
	num := ir.NewInt(base.Pos, int64(argc))

	// newLen := len(s) + num
	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	l = append(l, ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), num)))

	// if uint(newLen) <= uint(cap(s))
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, typecheck.Conv(newLen, types.Types[types.TUINT]), typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT]))
	nif.Likely = true

	// then { s = s[:newLen] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, slice),
	}

	// else { s = growslice(s.ptr, newLen, cap(s), num, elemType) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			newLen,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			num)),
	}

	l = append(l, nif)

	ls = n.Args[1:]
	for i, n := range ls {
		// s[newLen-argc+i] = arg
		ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewInt(base.Pos, int64(argc-i))))
		ix.SetBounded(true)
		l = append(l, ir.NewAssignStmt(base.Pos, ix, n))
	}

	typecheck.Stmts(l)
	walkStmtList(l)
	init.Append(l...)
	return s
}

// walkGrowslice emits a call to the runtime's growslice helper:
// growslice(oldPtr, newLen, oldCap, num, elemType).
func walkGrowslice(slice *ir.Name, init *ir.Nodes, oldPtr, newLen, oldCap, num ir.Node) *ir.CallExpr {
	elemtype := slice.Type().Elem()
	fn := typecheck.LookupRuntime("growslice", elemtype, elemtype)
	elemtypeptr := reflectdata.TypePtrAt(base.Pos, elemtype)
	return mkcall1(fn, slice.Type(), init, oldPtr, newLen, oldCap, num, elemtypeptr)
}
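
// For reference: the runtime helper targeted here has roughly the signature
//
//	func growslice(oldPtr unsafe.Pointer, newLen, oldCap, num int, et *_type) slice
//
// so for append(s, x) the grow branch becomes, schematically,
// s = growslice(s.ptr, len(s)+1, cap(s), 1, elemType).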

// walkClear walks an OCLEAR node.
func walkClear(n *ir.UnaryExpr) ir.Node {
	typ := n.X.Type()
	switch {
	case typ.IsSlice():
		if n := arrayClear(n.X.Pos(), n.X, nil); n != nil {
			return n
		}
		// Nothing to clear; replace with an empty block.
		return ir.NewBlockStmt(n.Pos(), nil)
	case typ.IsMap():
		return mapClear(n.X, reflectdata.TypePtrAt(n.X.Pos(), n.X.Type()))
	}
	panic("unreachable")
}

// walkClose walks an OCLOSE node.
func walkClose(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	// cannot use chanfn - closechan takes any, not chan any
	fn := typecheck.LookupRuntime("closechan", n.X.Type())
	return mkcall1(fn, nil, init, n.X)
}

// walkCopy walks an OCOPY node.
//
// It expands copy(to, from) to
//
//	init {
//		n := len(to)
//		if n > len(from) {
//			n = len(from)
//		}
//		if to.ptr != from.ptr {
//			memmove(to.ptr, from.ptr, n*sizeof(elem(to)))
//		}
//	}
//	n
//
// Also works if from is a string.
func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
	if n.X.Type().Elem().HasPointers() {
		ir.CurFunc.SetWBPos(n.Pos())
		fn := writebarrierfn("typedslicecopy", n.X.Type().Elem(), n.Y.Type().Elem())
		n.X = cheapExpr(n.X, init)
		ptrL, lenL := backingArrayPtrLen(n.X)
		n.Y = cheapExpr(n.Y, init)
		ptrR, lenR := backingArrayPtrLen(n.Y)
		return mkcall1(fn, n.Type(), init, reflectdata.CopyElemRType(base.Pos, n), ptrL, lenL, ptrR, lenR)
	}

	if runtimecall {
		// Rely on the runtime to instrument:
		//	copy(n.X, n.Y)
		// n.Y can be a slice or string.

		n.X = cheapExpr(n.X, init)
		ptrL, lenL := backingArrayPtrLen(n.X)
		n.Y = cheapExpr(n.Y, init)
		ptrR, lenR := backingArrayPtrLen(n.Y)

		fn := typecheck.LookupRuntime("slicecopy", ptrL.Type().Elem(), ptrR.Type().Elem())

		return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(base.Pos, n.X.Type().Elem().Size()))
	}

	n.X = walkExpr(n.X, init)
	n.Y = walkExpr(n.Y, init)
	nl := typecheck.TempAt(base.Pos, ir.CurFunc, n.X.Type())
	nr := typecheck.TempAt(base.Pos, ir.CurFunc, n.Y.Type())
	var l []ir.Node
	l = append(l, ir.NewAssignStmt(base.Pos, nl, n.X))
	l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Y))

	nfrm := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nr)
	nto := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nl)

	nlen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])

	// n = len(to)
	l = append(l, ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nl)))

	// if n > len(frm) { n = len(frm) }
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)

	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr))
	nif.Body.Append(ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)))
	l = append(l, nif)

	// if to.ptr != frm.ptr { memmove(to.ptr, frm.ptr, n*elemsize) }
	ne := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, nto, nfrm), nil, nil)
	ne.Likely = true
	l = append(l, ne)

	fn := typecheck.LookupRuntime("memmove", nl.Type().Elem(), nl.Type().Elem())
	nwid := ir.Node(typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR]))
	setwid := ir.NewAssignStmt(base.Pos, nwid, typecheck.Conv(nlen, types.Types[types.TUINTPTR]))
	ne.Body.Append(setwid)
	nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, nl.Type().Elem().Size()))
	call := mkcall1(fn, nil, init, nto, nfrm, nwid)
	ne.Body.Append(call)

	typecheck.Stmts(l)
	walkStmtList(l)
	init.Append(l...)
	return nlen
}

// walkDelete walks an ODELETE node.
func walkDelete(init *ir.Nodes, n *ir.CallExpr) ir.Node {
	init.Append(ir.TakeInit(n)...)
	map_ := n.Args[0]
	key := n.Args[1]
	map_ = walkExpr(map_, init)
	key = walkExpr(key, init)

	t := map_.Type()
	fast := mapfast(t)
	key = mapKeyArg(fast, n, key, false)
	return mkcall1(mapfndel(mapdelete[fast], t), nil, init, reflectdata.DeleteMapRType(base.Pos, n), map_, key)
}
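
// Illustrative sketch: a source-level
//
//	delete(m, k)
//
// is lowered to one of the runtime's mapdelete variants chosen by mapfast(t),
// e.g. roughly mapdelete_fast64(maptype, m, k) for a map whose key is a
// 64-bit scalar, or the generic mapdelete otherwise.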

// walkLenCap walks an OLEN or OCAP node.
func walkLenCap(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	if isRuneCount(n) {
		// Replace len([]rune(string)) with runtime.countrunes(string).
		return mkcall("countrunes", n.Type(), init, typecheck.Conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING]))
	}
	if isByteCount(n) {
		conv := n.X.(*ir.ConvExpr)
		walkStmtList(conv.Init())
		init.Append(ir.TakeInit(conv)...)
		_, len := backingArrayPtrLen(cheapExpr(conv.X, init))
		return len
	}

	n.X = walkExpr(n.X, init)

	// Replace len(*[10]int) with 10.
	// Delayed until now to preserve side effects.
	t := n.X.Type()

	if t.IsPtr() {
		t = t.Elem() // pointer to array
	}
	if t.IsArray() {
		safeExpr(n.X, init)
		con := ir.NewConstExpr(constant.MakeInt64(t.NumElem()), n)
		con.SetTypecheck(1)
		return con
	}
	return n
}

// walkMakeChan walks an OMAKECHAN node.
func walkMakeChan(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	// When size fits into int, use makechan instead of
	// makechan64, which is faster and shorter on 32 bit platforms.
	size := n.Len
	fnname := "makechan64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL size is positive and fits in an int.
	// The case of size overflow when converting TUINT or TUINTPTR to TINT
	// is handled by the negative range checks in makechan at run time.
	if size.Type().IsKind(types.TIDEAL) || size.Type().Size() <= types.Types[types.TUINT].Size() {
		fnname = "makechan"
		argtype = types.Types[types.TINT]
	}

	return mkcall1(chanfn(fnname, 1, n.Type()), n.Type(), init, reflectdata.MakeChanRType(base.Pos, n), typecheck.Conv(size, argtype))
}
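
// Illustrative sketch: a source-level
//
//	c := make(chan T, n)
//
// becomes roughly makechan(chanType, int(n)) when n's type fits in an int,
// and makechan64(chanType, int64(n)) otherwise.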

// walkMakeMap walks an OMAKEMAP node.
func walkMakeMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	t := n.Type()
	hmapType := reflectdata.MapType()
	hint := n.Len

	// var h *hmap
	var h ir.Node
	if n.Esc() == ir.EscNone {
		// Allocate hmap on stack.

		// var hv hmap
		// h = &hv
		h = stackTempAddr(init, hmapType)

		// If hint is not larger than BUCKETSIZE, allocate one bucket on the
		// stack and point hmap.buckets at it; for larger hints runtime.makemap
		// allocates the buckets on the heap.
		if !ir.IsConst(hint, constant.Int) ||
			constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(reflectdata.BUCKETSIZE)) {

			// When the hint is not a constant, decide at run time:
			//
			//	if hint <= BUCKETSIZE {
			//		var bv bmap
			//		b = &bv
			//		h.buckets = b
			//	} else {
			//		allocate buckets on the heap for h
			//	}

			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(base.Pos, reflectdata.BUCKETSIZE)), nil, nil)
			nif.Likely = true

			// var bv bmap
			// b = &bv
			b := stackTempAddr(&nif.Body, reflectdata.MapBucketType(t))

			// h.buckets = b
			bsym := hmapType.Field(5).Sym // hmap.buckets
			na := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, bsym), typecheck.ConvNop(b, types.Types[types.TUNSAFEPTR]))
			nif.Body.Append(na)
			appendWalkStmt(init, nif)
		}
	}

	if ir.IsConst(hint, constant.Int) && constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(reflectdata.BUCKETSIZE)) {
		// Handling make(map[any]any) and
		// make(map[any]any, hint) where hint <= BUCKETSIZE
		// specially allows for faster map initialization and
		// improves binary size by using calls with fewer arguments.
		// For hint <= BUCKETSIZE makemap does not need to allocate
		// any buckets, so none are allocated in this code path either.
		if n.Esc() == ir.EscNone {
			// Only need to initialize h.hash0 since
			// hmap h has been allocated on the stack already.
			// h.hash0 = rand32()
			rand := mkcall("rand32", types.Types[types.TUINT32], init)
			hashsym := hmapType.Field(4).Sym // hmap.hash0
			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, hashsym), rand))
			return typecheck.ConvNop(h, t)
		}
		// Call runtime.makemap_small to allocate an
		// hmap on the heap and initialize the hmap's hash0 field.
		fn := typecheck.LookupRuntime("makemap_small", t.Key(), t.Elem())
		return mkcall1(fn, n.Type(), init)
	}

	if n.Esc() != ir.EscNone {
		h = typecheck.NodNil()
	}
	// Map initialization with a variable or large hint is
	// more complicated. Generate a call to runtime.makemap
	// to initialize the hmap and allocate the map buckets.

	// When hint fits into int, use makemap instead of
	// makemap64, which is faster and shorter on 32 bit platforms.
	fnname := "makemap64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL hint is positive and fits in an int.
	// The case of hint overflow when converting TUINT or TUINTPTR to TINT
	// is handled by the negative range checks in makemap at run time.
	if hint.Type().IsKind(types.TIDEAL) || hint.Type().Size() <= types.Types[types.TUINT].Size() {
		fnname = "makemap"
		argtype = types.Types[types.TINT]
	}

	fn := typecheck.LookupRuntime(fnname, hmapType, t.Key(), t.Elem())
	return mkcall1(fn, n.Type(), init, reflectdata.MakeMapRType(base.Pos, n), typecheck.Conv(hint, argtype), h)
}

// walkMakeSlice walks an OMAKESLICE node.
func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	l := n.Len
	r := n.Cap
	if r == nil {
		r = safeExpr(l, init)
		l = r
	}
	t := n.Type()
	if t.Elem().NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
	}
	if n.Esc() == ir.EscNone {
		if why := escape.HeapAllocReason(n); why != "" {
			base.Fatalf("%v has EscNone, but %v", n, why)
		}
		// var arr [r]T
		// n = arr[:l]
		i := typecheck.IndexConst(r)
		if i < 0 {
			base.Fatalf("walkExpr: invalid index %v", r)
		}

		// cap is constrained to [0,2^31) or [0,2^63) depending on whether
		// we're in 32-bit or 64-bit systems. So it's safe to do:
		//
		//	if uint64(len) > cap {
		//		if len < 0 { panicmakeslicelen() }
		//		panicmakeslicecap()
		//	}
		nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(l, types.Types[types.TUINT64]), ir.NewInt(base.Pos, i)), nil, nil)
		niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, l, ir.NewInt(base.Pos, 0)), nil, nil)
		niflen.Body = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
		nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init))
		init.Append(typecheck.Stmt(nif))

		t = types.NewArray(t.Elem(), i) // [r]T
		var_ := typecheck.TempAt(base.Pos, ir.CurFunc, t)
		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil))  // zero temp
		r := ir.NewSliceExpr(base.Pos, ir.OSLICE, var_, nil, l, nil) // arr[:l]
		// The conv is necessary in case n.Type is named.
		return walkExpr(typecheck.Expr(typecheck.Conv(r, n.Type())), init)
	}

	// n escapes; set up a call to makeslice.
	// When len and cap can fit into int, use makeslice instead of
	// makeslice64, which is faster and shorter on 32 bit platforms.

	len, cap := l, r

	fnname := "makeslice64"
	argtype := types.Types[types.TINT64]

	// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
	// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
	// is handled by the negative range checks in makeslice at run time.
	if (len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size()) &&
		(cap.Type().IsKind(types.TIDEAL) || cap.Type().Size() <= types.Types[types.TUINT].Size()) {
		fnname = "makeslice"
		argtype = types.Types[types.TINT]
	}
	fn := typecheck.LookupRuntime(fnname)
	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), typecheck.Conv(len, argtype), typecheck.Conv(cap, argtype))
	ptr.MarkNonNil()
	len = typecheck.Conv(len, types.Types[types.TINT])
	cap = typecheck.Conv(cap, types.Types[types.TINT])
	sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, len, cap)
	return walkExpr(typecheck.Expr(sh), init)
}
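
// Illustrative sketch: a heap-allocated
//
//	s := make([]T, l, c)
//
// becomes roughly
//
//	p := makeslice(elemType, int(l), int(c)) // returns unsafe.Pointer
//	s := sliceheader{p, int(l), int(c)}
//
// while a non-escaping make with constant capacity is rewritten into a stack
// array of that size, bounds panics, and a slice of the array.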

// walkMakeSliceCopy walks an OMAKESLICECOPY node.
func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
	if n.Esc() == ir.EscNone {
		base.Fatalf("OMAKESLICECOPY with EscNone: %v", n)
	}

	t := n.Type()
	if t.Elem().NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
	}

	length := typecheck.Conv(n.Len, types.Types[types.TINT])
	copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Cap)
	copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Cap)

	if !t.Elem().HasPointers() && n.Bounded() {
		// When len(to)==len(from) and elements have no pointers:
		// replace make+copy with runtime.mallocgc + runtime.memmove.

		// We do not check for overflow of len(to)*elem.Width here
		// since len(from) is an existing checked slice capacity
		// with the same elem.Width for the from slice.
		size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(base.Pos, t.Elem().Size()), types.Types[types.TUINTPTR]))

		// ptr = mallocgc(size, nil, false)
		fn := typecheck.LookupRuntime("mallocgc")
		ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(base.Pos, false))
		ptr.MarkNonNil()
		sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)

		s := typecheck.TempAt(base.Pos, ir.CurFunc, t)
		r := typecheck.Stmt(ir.NewAssignStmt(base.Pos, s, sh))
		r = walkExpr(r, init)
		init.Append(r)

		// memmove(s.ptr, from.ptr, size)
		fn = typecheck.LookupRuntime("memmove", t.Elem(), t.Elem())
		ncopy := mkcall1(fn, nil, init, ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), copyptr, size)
		init.Append(walkExpr(typecheck.Stmt(ncopy), init))

		return s
	}

	// Replace make+copy with a single call to runtime.makeslicecopy:
	// makeslicecopy(elemType, length, len(from), from.ptr)
	fn := typecheck.LookupRuntime("makeslicecopy")
	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), length, copylen, typecheck.Conv(copyptr, types.Types[types.TUNSAFEPTR]))
	ptr.MarkNonNil()
	sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)
	return walkExpr(typecheck.Expr(sh), init)
}
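
// Illustrative sketch: the pattern
//
//	s := make([]T, len(src))
//	copy(s, src)
//
// reaches this function as a single OMAKESLICECOPY node and is lowered either
// to makeslicecopy(elemType, len, len(src), src.ptr) or, when T has no
// pointers and the copy is known to cover the whole allocation, to
// mallocgc + memmove.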

// walkNew walks an ONEW node.
func walkNew(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	t := n.Type().Elem()
	if t.NotInHeap() {
		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type().Elem())
	}
	if n.Esc() == ir.EscNone {
		if t.Size() > ir.MaxImplicitStackVarSize {
			base.Fatalf("large ONEW with EscNone: %v", n)
		}
		return stackTempAddr(init, t)
	}
	types.CalcSize(t)
	n.MarkNonNil()
	return n
}
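
// Illustrative sketch: a non-escaping
//
//	p := new(T)
//
// becomes the address of a zeroed stack temporary, roughly
//
//	var tmp T
//	p := &tmp
//
// while an escaping new(T) keeps its ONEW node, marked non-nil, for the
// backend to turn into a heap allocation.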

// walkMinMax walks an OMIN or OMAX node.
func walkMinMax(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	init.Append(ir.TakeInit(n)...)
	walkExprList(n.Args, init)
	return n
}

// walkPrint walks an OPRINT or OPRINTLN node.
func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
	// Hoist all the argument evaluation up before the lock.
	walkExprListCheap(nn.Args, init)

	// For println, add " " between elements and "\n" at the end.
	if nn.Op() == ir.OPRINTLN {
		s := nn.Args
		t := make([]ir.Node, 0, len(s)*2)
		for i, n := range s {
			if i != 0 {
				t = append(t, ir.NewString(base.Pos, " "))
			}
			t = append(t, n)
		}
		t = append(t, ir.NewString(base.Pos, "\n"))
		nn.Args = t
	}

	// Collapse runs of constant strings.
	s := nn.Args
	t := make([]ir.Node, 0, len(s))
	for i := 0; i < len(s); {
		var strs []string
		for i < len(s) && ir.IsConst(s[i], constant.String) {
			strs = append(strs, ir.StringVal(s[i]))
			i++
		}
		if len(strs) > 0 {
			t = append(t, ir.NewString(base.Pos, strings.Join(strs, "")))
		}
		if i < len(s) {
			t = append(t, s[i])
			i++
		}
	}
	nn.Args = t
	calls := []ir.Node{mkcall("printlock", nil, init)}
	for i, n := range nn.Args {
		if n.Op() == ir.OLITERAL {
			if n.Type() == types.UntypedRune {
				n = typecheck.DefaultLit(n, types.RuneType)
			}

			switch n.Val().Kind() {
			case constant.Int:
				n = typecheck.DefaultLit(n, types.Types[types.TINT64])

			case constant.Float:
				n = typecheck.DefaultLit(n, types.Types[types.TFLOAT64])
			}
		}

		if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
			n = typecheck.DefaultLit(n, types.Types[types.TINT64])
		}
		n = typecheck.DefaultLit(n, nil) // resolve any remaining untyped constant
		nn.Args[i] = n
		if n.Type() == nil || n.Type().Kind() == types.TFORW {
			continue
		}

		var on *ir.Name
		switch n.Type().Kind() {
		case types.TINTER:
			if n.Type().IsEmptyInterface() {
				on = typecheck.LookupRuntime("printeface", n.Type())
			} else {
				on = typecheck.LookupRuntime("printiface", n.Type())
			}
		case types.TPTR:
			if n.Type().Elem().NotInHeap() {
				on = typecheck.LookupRuntime("printuintptr")
				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
				n.SetType(types.Types[types.TUNSAFEPTR])
				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
				n.SetType(types.Types[types.TUINTPTR])
				break
			}
			fallthrough
		case types.TCHAN, types.TMAP, types.TFUNC, types.TUNSAFEPTR:
			on = typecheck.LookupRuntime("printpointer", n.Type())
		case types.TSLICE:
			on = typecheck.LookupRuntime("printslice", n.Type())
		case types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
			if types.RuntimeSymName(n.Type().Sym()) == "hex" {
				on = typecheck.LookupRuntime("printhex")
			} else {
				on = typecheck.LookupRuntime("printuint")
			}
		case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64:
			on = typecheck.LookupRuntime("printint")
		case types.TFLOAT32, types.TFLOAT64:
			on = typecheck.LookupRuntime("printfloat")
		case types.TCOMPLEX64, types.TCOMPLEX128:
			on = typecheck.LookupRuntime("printcomplex")
		case types.TBOOL:
			on = typecheck.LookupRuntime("printbool")
		case types.TSTRING:
			cs := ""
			if ir.IsConst(n, constant.String) {
				cs = ir.StringVal(n)
			}
			switch cs {
			case " ":
				on = typecheck.LookupRuntime("printsp")
			case "\n":
				on = typecheck.LookupRuntime("printnl")
			default:
				on = typecheck.LookupRuntime("printstring")
			}
		default:
			badtype(ir.OPRINT, n.Type(), nil)
			continue
		}

		r := ir.NewCallExpr(base.Pos, ir.OCALL, on, nil)
		if params := on.Type().Params(); len(params) > 0 {
			t := params[0].Type
			n = typecheck.Conv(n, t)
			r.Args.Append(n)
		}
		calls = append(calls, r)
	}

	calls = append(calls, mkcall("printunlock", nil, init))

	typecheck.Stmts(calls)
	walkExprList(calls, init)

	r := ir.NewBlockStmt(base.Pos, nil)
	r.List = calls
	return walkStmt(typecheck.Stmt(r))
}
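
// Illustrative sketch: with x declared as an int,
//
//	println("x:", x)
//
// is lowered to roughly
//
//	printlock()
//	printstring("x: ")
//	printint(int64(x))
//	printnl()
//	printunlock()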

// walkRecoverFP walks an ORECOVERFP node.
func walkRecoverFP(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
	return mkcall("gorecover", nn.Type(), init, walkExpr(nn.Args[0], init))
}

// walkUnsafeData lowers unsafe.SliceData and unsafe.StringData by taking
// the pointer (OSPTR) of the operand.
func walkUnsafeData(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
	slice := walkExpr(n.X, init)
	res := typecheck.Expr(ir.NewUnaryExpr(n.Pos(), ir.OSPTR, slice))
	res.SetType(n.Type())
	return walkExpr(res, init)
}

func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	ptr := safeExpr(n.X, init)
	len := safeExpr(n.Y, init)
	sliceType := n.Type()

	lenType := types.Types[types.TINT64]
	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])

	// If checkptr is enabled, call runtime.unsafeslicecheckptr to check ptr
	// and len; for simplicity, it always takes the length as an int64.
	// Otherwise, open-code the checks to avoid the runtime call overhead.
	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
		fnname := "unsafeslicecheckptr"
		fn := typecheck.LookupRuntime(fnname)
		init.Append(mkcall1(fn, nil, init, reflectdata.UnsafeSliceElemRType(base.Pos, n), unsafePtr, typecheck.Conv(len, lenType)))
	} else {
		// Open-code unsafe.Slice to prevent runtime call overhead.
		// Keep this code in sync with the runtime's unsafeslice functions.
		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
			lenType = types.Types[types.TINT]
		} else {
			// len64 := int64(len)
			// if int64(int(len64)) != len64 {
			//	panicunsafeslicelen()
			// }
			len64 := typecheck.Conv(len, lenType)
			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
			nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
			appendWalkStmt(init, nif)
		}

		// if len < 0 { panicunsafeslicelen() }
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
		nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
		appendWalkStmt(init, nif)

		if sliceType.Elem().Size() == 0 {
			// if ptr == nil && len > 0 {
			//	panicunsafeslicenilptr()
			// }
			nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
			isNil := ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
			gtZero := ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
			nifPtr.Cond =
				ir.NewLogicalExpr(base.Pos, ir.OANDAND, isNil, gtZero)
			nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
			appendWalkStmt(init, nifPtr)

			h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
				typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
				typecheck.Conv(len, types.Types[types.TINT]),
				typecheck.Conv(len, types.Types[types.TINT]))
			return walkExpr(typecheck.Expr(h), init)
		}

		// mem, overflow := math.MulUintptr(elemSize, len)
		mem := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR])
		overflow := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])

		decl := types.NewSignature(nil,
			[]*types.Field{
				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
			},
			[]*types.Field{
				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
				types.NewField(base.Pos, nil, types.Types[types.TBOOL]),
			})

		fn := ir.NewFunc(n.Pos(), n.Pos(), math_MulUintptr, decl)

		call := mkcall1(fn.Nname, fn.Type().ResultsTuple(), init, ir.NewInt(base.Pos, sliceType.Elem().Size()), typecheck.Conv(typecheck.Conv(len, lenType), types.Types[types.TUINTPTR]))
		appendWalkStmt(init, ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{mem, overflow}, []ir.Node{call}))

		// if overflow || mem > -uintptr(ptr) {
		//	if ptr == nil {
		//		panicunsafeslicenilptr()
		//	}
		//	panicunsafeslicelen()
		// }
		nif = ir.NewIfStmt(base.Pos, nil, nil, nil)
		memCond := ir.NewBinaryExpr(base.Pos, ir.OGT, mem, ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
		nif.Cond = ir.NewLogicalExpr(base.Pos, ir.OOROR, overflow, memCond)
		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
		nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
		nif.Body.Append(nifPtr, mkcall("panicunsafeslicelen", nil, &nif.Body))
		appendWalkStmt(init, nif)
	}

	h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
		typecheck.Conv(len, types.Types[types.TINT]),
		typecheck.Conv(len, types.Types[types.TINT]))
	return walkExpr(typecheck.Expr(h), init)
}
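
// Illustrative sketch: without checkptr, and for a non-zero element size,
//
//	s := unsafe.Slice(p, n)
//
// is open-coded as roughly
//
//	if n < 0 { panicunsafeslicelen() }
//	mem, overflow := math.MulUintptr(sizeof(T), uintptr(n))
//	if overflow || mem > -uintptr(unsafe.Pointer(p)) {
//		if p == nil { panicunsafeslicenilptr() }
//		panicunsafeslicelen()
//	}
//	s := sliceheader{unsafe.Pointer(p), int(n), int(n)}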

var math_MulUintptr = &types.Sym{Pkg: types.NewPkg("runtime/internal/math", "math"), Name: "MulUintptr"}

func walkUnsafeString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
	ptr := safeExpr(n.X, init)
	len := safeExpr(n.Y, init)

	lenType := types.Types[types.TINT64]
	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])

	// If checkptr is enabled, call runtime.unsafestringcheckptr to check ptr
	// and len; for simplicity, it always takes the length as an int64.
	// Otherwise, open-code the checks to avoid the runtime call overhead.
	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
		fnname := "unsafestringcheckptr"
		fn := typecheck.LookupRuntime(fnname)
		init.Append(mkcall1(fn, nil, init, unsafePtr, typecheck.Conv(len, lenType)))
	} else {
		// Open-code unsafe.String to prevent runtime call overhead.
		// Keep this code in sync with the runtime's unsafestring functions.
		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
			lenType = types.Types[types.TINT]
		} else {
			// len64 := int64(len)
			// if int64(int(len64)) != len64 {
			//	panicunsafestringlen()
			// }
			len64 := typecheck.Conv(len, lenType)
			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
			nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
			appendWalkStmt(init, nif)
		}

		// if len < 0 { panicunsafestringlen() }
		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
		nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
		appendWalkStmt(init, nif)

		// if uintptr(len) > -uintptr(ptr) {
		//	if ptr == nil {
		//		panicunsafestringnilptr()
		//	}
		//	panicunsafestringlen()
		// }
		nifLen := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifLen.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINTPTR]), ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
		nifPtr.Body.Append(mkcall("panicunsafestringnilptr", nil, &nifPtr.Body))
		nifLen.Body.Append(nifPtr, mkcall("panicunsafestringlen", nil, &nifLen.Body))
		appendWalkStmt(init, nifLen)
	}
	h := ir.NewStringHeaderExpr(n.Pos(),
		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
		typecheck.Conv(len, types.Types[types.TINT]),
	)
	return walkExpr(typecheck.Expr(h), init)
}

func badtype(op ir.Op, tl, tr *types.Type) {
	var s string
	if tl != nil {
		s += fmt.Sprintf("\n\t%v", tl)
	}
	if tr != nil {
		s += fmt.Sprintf("\n\t%v", tr)
	}

	// common mistake: *struct vs *interface.
	if tl != nil && tr != nil && tl.IsPtr() && tr.IsPtr() {
		if tl.Elem().IsStruct() && tr.Elem().IsInterface() {
			s += "\n\t(*struct vs *interface)"
		} else if tl.Elem().IsInterface() && tr.Elem().IsStruct() {
			s += "\n\t(*interface vs *struct)"
		}
	}

	base.Errorf("illegal types for operand: %v%s", op, s)
}

func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node {
	return typecheck.LookupRuntime(name, l, r)
}

// isRuneCount reports whether n is of the form len([]rune(string)).
// These are optimized into a call to runtime.countrunes.
func isRuneCount(n ir.Node) bool {
	return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES
}

// isByteCount reports whether n is of the form len(string([]byte)).
func isByteCount(n ir.Node) bool {
	return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN &&
		(n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STR || n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STRTMP)
}