// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Garbage collector liveness bitmap generation.

// The command line flag -live causes this code to print debug information.
// With -live (aliased to -live=1), print liveness lists as code warnings.
// With -live=2, print an assembly listing with liveness annotations.
package liveness

import (
	"fmt"
	"os"
	"sort"
	"strings"

	"cmd/compile/internal/abi"
	"cmd/compile/internal/base"
	"cmd/compile/internal/bitvec"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/objw"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/typebits"
	"cmd/compile/internal/types"
	"cmd/internal/notsha256"
	"cmd/internal/obj"
	"cmd/internal/src"

	rtabi "internal/abi"
)

// OpVarDef is an annotation for the liveness analysis, marking a place
// where a complete initialization (definition) of a variable begins.
// Since the liveness analysis can easily see the initialization of
// single-word variables, OpVarDef is only strictly needed for multi-word
// variables satisfying isfat(n.Type()); for simplicity, though, the SSA
// builder emits OpVarDef regardless of variable width.
//
// An 'OpVarDef x' annotation tells the liveness analysis to behave as
// though the variable x is being initialized at that point in the
// instruction stream. The OpVarDef must appear before the actual
// (multi-instruction) initialization, and it must also appear after any
// uses of the previous value, if any. For example, if compiling
//
//	x = x + 1
//
// into
//
//	VARDEF x
//	MOVQ x, BX
//	ADDQ $1, BX
//	MOVQ BX, x
//
// then the liveness analysis would treat the previous value of x as dead
// at the VARDEF even though that value is still read by the following
// MOVQ x, BX, so a stack scan there could fail to preserve it. The correct
// placement is after the last use of the old value:
//
//	MOVQ x, BX
//	VARDEF x
//	ADDQ $1, BX
//	MOVQ BX, x

// blockEffects summarizes the liveness effects on an SSA block.
type blockEffects struct {
	// Computed during Liveness.prologue using only the content of
	// individual blocks:
	//
	//	uevar: upward exposed variables (used before set in block)
	//	varkill: killed variables (set in block)
	uevar   bitvec.BitVec
	varkill bitvec.BitVec

	// Computed during Liveness.solve using control flow information:
	//
	//	livein: variables live at block entry
	//	liveout: variables live at block exit
	livein  bitvec.BitVec
	liveout bitvec.BitVec
}

// A collection of global state used by liveness analysis.
type liveness struct {
	fn         *ir.Func
	f          *ssa.Func
	vars       []*ir.Name
	idx        map[*ir.Name]int32
	stkptrsize int64

	be []blockEffects

	// allUnsafe indicates that all points in this function are
	// unsafe-points.
	allUnsafe bool
	// unsafePoints bit i is set if Value ID i is an unsafe-point
	// (preemption is not allowed). Only valid if !allUnsafe.
	unsafePoints bitvec.BitVec
	// unsafeBlocks bit i is set if Block ID i is an unsafe-point
	// (preemption is not allowed on any end-of-block instruction).
	unsafeBlocks bitvec.BitVec

	// An array with a bit vector for each safe point in the
	// current Block during liveness.epilogue. Indexed in Value
	// order for that block. Additionally, for the entry block
	// livevars[0] is the entry bitmap. liveness.compact moves
	// these to stackMaps.
	livevars []bitvec.BitVec

	// livenessMap maps from safe points (i.e., CALLs) to their
	// liveness map indexes.
	livenessMap Map
	stackMapSet bvecSet
	stackMaps   []bitvec.BitVec

	cache progeffectscache

	// partLiveArgs includes input arguments (PPARAM) that may
	// be partially live. That is, an argument is considered live
	// because a part of it is used, but we may not initialize all
	// parts.
	partLiveArgs map[*ir.Name]bool

	doClobber     bool // whether to clobber dead stack slots in this function
	noClobberArgs bool // do not clobber function arguments
}

// Map maps from *ssa.Value to StackMapIndex.
// It also keeps track of unsafe ssa.Values and ssa.Blocks
// (unsafe = cannot be interrupted during GC).
type Map struct {
	Vals         map[ssa.ID]objw.StackMapIndex
	UnsafeVals   map[ssa.ID]bool
	UnsafeBlocks map[ssa.ID]bool
	// The set of live, pointer-containing variables at the DeferReturn
	// call (only set when open-coded defers are used).
	DeferReturn objw.StackMapIndex
}

func (m *Map) reset() {
	if m.Vals == nil {
		m.Vals = make(map[ssa.ID]objw.StackMapIndex)
		m.UnsafeVals = make(map[ssa.ID]bool)
		m.UnsafeBlocks = make(map[ssa.ID]bool)
	} else {
		for k := range m.Vals {
			delete(m.Vals, k)
		}
		for k := range m.UnsafeVals {
			delete(m.UnsafeVals, k)
		}
		for k := range m.UnsafeBlocks {
			delete(m.UnsafeBlocks, k)
		}
	}
	m.DeferReturn = objw.StackMapDontCare
}

func (m *Map) set(v *ssa.Value, i objw.StackMapIndex) {
	m.Vals[v.ID] = i
}
func (m *Map) setUnsafeVal(v *ssa.Value) {
	m.UnsafeVals[v.ID] = true
}
func (m *Map) setUnsafeBlock(b *ssa.Block) {
	m.UnsafeBlocks[b.ID] = true
}

func (m Map) Get(v *ssa.Value) objw.StackMapIndex {
	// If v isn't in the map, then it's a "don't care".
	if idx, ok := m.Vals[v.ID]; ok {
		return idx
	}
	return objw.StackMapDontCare
}
func (m Map) GetUnsafe(v *ssa.Value) bool {
	// default is safe
	return m.UnsafeVals[v.ID]
}
func (m Map) GetUnsafeBlock(b *ssa.Block) bool {
	// default is safe
	return m.UnsafeBlocks[b.ID]
}

type progeffectscache struct {
	retuevar    []int32
	tailuevar   []int32
	initialized bool
}

// shouldTrack reports whether the liveness analysis
// should track the variable n: stack-resident locals (PAUTO that does not
// escape to the heap) and parameters/results (PPARAM/PPARAMOUT), but only
// if their type contains pointers. Pointer-free variables can never affect
// the garbage collector's stack maps.
func shouldTrack(n *ir.Name) bool {
	return (n.Class == ir.PAUTO && n.Esc() != ir.EscHeap || n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT) && n.Type().HasPointers()
}
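
// For illustration (a hypothetical function, not from this source): given
//
//	func f(p *int) *int {
//		i := 0        // PAUTO, no pointers: not tracked
//		q := new(int) // PAUTO with a pointer, stays on stack: tracked
//		_ = i
//		return q // result slot is a pointer PPARAMOUT: tracked
//	}
//
// p, q, and the result slot appear in lv.vars, while i never appears in
// the tracked set or the emitted stack maps.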

// getvariables returns the list of on-stack variables that we need to track
// and a map for looking up their indices by *ir.Name.
func getvariables(fn *ir.Func) ([]*ir.Name, map[*ir.Name]int32) {
	var vars []*ir.Name
	for _, n := range fn.Dcl {
		if shouldTrack(n) {
			vars = append(vars, n)
		}
	}
	idx := make(map[*ir.Name]int32, len(vars))
	for i, n := range vars {
		idx[n] = int32(i)
	}
	return vars, idx
}

func (lv *liveness) initcache() {
	if lv.cache.initialized {
		base.Fatalf("liveness cache initialized twice")
		return
	}
	lv.cache.initialized = true

	for i, node := range lv.vars {
		switch node.Class {
		case ir.PPARAM:
			// A tail call (RetJmp) jumps to another function while
			// reusing this frame, so the callee reads all input
			// parameters from our frame. For correctness, every
			// input parameter must therefore be live at a tail call.
			lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))

		case ir.PPARAMOUT:
			// All results are live at every return point.
			// Note that this point is after escaping return values
			// are copied back to the heap using their PAUTOHEAP references.
			lv.cache.retuevar = append(lv.cache.retuevar, int32(i))
		}
	}
}

// A liveEffect is a set of flags that describe an instruction's effects
// on a variable. The possible flags are:
//
//	uevar - used by the instruction
//	varkill - killed by the instruction (set)
//
// A kill happens after the use (for an instruction that updates a value,
// for example).
type liveEffect int

const (
	uevar liveEffect = 1 << iota
	varkill
)
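
// For illustration (a hypothetical block, not from this source): for
//
//	y = *p
//	p = nil
//
// the load of *p yields a uevar effect on p (a read), and p = nil yields a
// varkill effect on p (a complete overwrite). Scanning the block backward,
// the kill first removes p from the live set, then the earlier read re-adds
// it, so p ends up upward-exposed (in the block's uevar set).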

// valueEffects returns the index of a variable in lv.vars and the
// liveness effects v has on that variable.
// If v does not affect any tracked variables, it returns -1, 0.
func (lv *liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
	n, e := affectedVar(v)
	if e == 0 || n == nil {
		return -1, 0
	}

	// AllocFrame has dropped unused variables from
	// lv.fn.Func.Dcl, but they might still be referenced by
	// OpVarFoo pseudo-ops. Ignore them to prevent "lost track of
	// variable" ICEs (issue 19632).
	switch v.Op {
	case ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive:
		if !n.Used() {
			return -1, 0
		}
	}

	if n.Class == ir.PPARAM && !n.Addrtaken() && n.Type().Size() > int64(types.PtrSize) {
		// Only aggregate-typed arguments that are not address-taken can be
		// partially live.
		lv.partLiveArgs[n] = true
	}

	var effect liveEffect
	// Read is a read, obviously.
	//
	// Addr is a read also, as any subsequent holder of the pointer must be
	// able to see all the values (including initialization) written so far.
	// This also prevents a variable from "coming back from the dead" and
	// presenting stale pointers to the garbage collector. See issue 28445.
	if e&(ssa.SymRead|ssa.SymAddr) != 0 {
		effect |= uevar
	}
	if e&ssa.SymWrite != 0 && (!isfat(n.Type()) || v.Op == ssa.OpVarDef) {
		effect |= varkill
	}

	if effect == 0 {
		return -1, 0
	}

	if pos, ok := lv.idx[n]; ok {
		return pos, effect
	}
	return -1, 0
}

// affectedVar returns the *ir.Name node affected by v.
func affectedVar(v *ssa.Value) (*ir.Name, ssa.SymEffect) {
	// Special cases.
	switch v.Op {
	case ssa.OpLoadReg:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	case ssa.OpStoreReg:
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymWrite

	case ssa.OpArgIntReg:
		// OpArgIntReg reads a register-passed argument. Model this as
		// a read of the argument's stack home as well: together with
		// the partLiveArgs bookkeeping in valueEffects, this keeps the
		// stack-resident portion of such arguments visible to the
		// liveness maps. (Float registers never hold pointers, so
		// there is no corresponding case for OpArgFloatReg.)
		n, _ := ssa.AutoVar(v)
		return n, ssa.SymRead

	case ssa.OpVarLive:
		return v.Aux.(*ir.Name), ssa.SymRead
	case ssa.OpVarDef:
		return v.Aux.(*ir.Name), ssa.SymWrite
	case ssa.OpKeepAlive:
		n, _ := ssa.AutoVar(v.Args[0])
		return n, ssa.SymRead
	}

	e := v.Op.SymEffect()
	if e == 0 {
		return nil, 0
	}

	switch a := v.Aux.(type) {
	case nil, *obj.LSym:
		// ok, but no node
		return nil, e
	case *ir.Name:
		return a, e
	default:
		base.Fatalf("weird aux: %s", v.LongString())
		return nil, e
	}
}

type livenessFuncCache struct {
	be          []blockEffects
	livenessMap Map
}

// newliveness constructs the liveness state used to hold the global state
// of one function's liveness computation.
func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int32, stkptrsize int64) *liveness {
	lv := &liveness{
		fn:         fn,
		f:          f,
		vars:       vars,
		idx:        idx,
		stkptrsize: stkptrsize,
	}

	// Significant sources of allocation are kept in the ssa.Cache
	// and reused. Surprisingly, the bit vectors themselves aren't
	// a major source of allocation, but the liveness maps are.
	if lc, _ := f.Cache.Liveness.(*livenessFuncCache); lc == nil {
		// Prep the cache so liveness can fill it later.
		f.Cache.Liveness = new(livenessFuncCache)
	} else {
		if cap(lc.be) >= f.NumBlocks() {
			lv.be = lc.be[:f.NumBlocks()]
		}
		lv.livenessMap = Map{
			Vals:         lc.livenessMap.Vals,
			UnsafeVals:   lc.livenessMap.UnsafeVals,
			UnsafeBlocks: lc.livenessMap.UnsafeBlocks,
			DeferReturn:  objw.StackMapDontCare,
		}
		lc.livenessMap.Vals = nil
		lc.livenessMap.UnsafeVals = nil
		lc.livenessMap.UnsafeBlocks = nil
	}
	if lv.be == nil {
		lv.be = make([]blockEffects, f.NumBlocks())
	}

	nblocks := int32(len(f.Blocks))
	nvars := int32(len(vars))
	bulk := bitvec.NewBulk(nvars, nblocks*7)
	for _, b := range f.Blocks {
		be := lv.blockEffects(b)

		be.uevar = bulk.Next()
		be.varkill = bulk.Next()
		be.livein = bulk.Next()
		be.liveout = bulk.Next()
	}
	lv.livenessMap.reset()

	lv.markUnsafePoints()

	lv.partLiveArgs = make(map[*ir.Name]bool)

	lv.enableClobber()

	return lv
}

func (lv *liveness) blockEffects(b *ssa.Block) *blockEffects {
	return &lv.be[b.ID]
}

// pointerMap generates the pointer bitmaps (args for the argument area,
// locals for the local variables frame) for the variables in vars that are
// marked live in liveout.
func (lv *liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, locals bitvec.BitVec) {
	for i := int32(0); ; i++ {
		i = liveout.Next(i)
		if i < 0 {
			break
		}
		node := vars[i]
		switch node.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !node.IsOutputParamInRegisters() {
				if node.FrameOffset() < 0 {
					lv.f.Fatalf("Node %v has frameoffset %d\n", node.Sym().Name, node.FrameOffset())
				}
				typebits.SetNoCheck(node.Type(), node.FrameOffset(), args)
				break
			}
			fallthrough // PPARAMOUT in registers acts memory-wise like a local
		case ir.PAUTO:
			typebits.Set(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
		}
	}
}

// IsUnsafe indicates that all points in this function are
// unsafe-points.
func IsUnsafe(f *ssa.Func) bool {
	// The runtime assumes the only safe-points are function
	// prologues (because that's how it used to be). We could and
	// should improve that, but for now, consider all points in the
	// runtime unsafe. obj will add prologues and their safe-points.
	//
	// go:nosplit functions are similar. Since safe points used to
	// be coupled with stack checks, go:nosplit often actually
	// means "no safe points in this function".
	return base.Flag.CompilingRuntime || f.NoSplit
}

// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
func (lv *liveness) markUnsafePoints() {
	if IsUnsafe(lv.f) {
		// No complex analysis necessary.
		lv.allUnsafe = true
		return
	}

	lv.unsafePoints = bitvec.New(int32(lv.f.NumValues()))
	lv.unsafeBlocks = bitvec.New(int32(lv.f.NumBlocks()))

	// Mark architecture-specific unsafe points.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op.UnsafePoint() {
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}

	// Mark write barrier unsafe points.
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if v.Op != ssa.OpWBend {
				continue
			}
			// WBend appears at the start of a block, which is the join
			// point of a write barrier diamond:
			//
			//	... some mem
			//	if wbEnabled: goto C else D
			//
			//	C:
			//	... store with write barrier
			//	goto B
			//
			//	D:
			//	... store without write barrier
			//	goto B
			//
			//	B:
			//	m1 = Phi mem_C mem_D
			//	m2 = store operation ... m1
			//	m3 = store operation ... m2
			//	m4 = WBend m3

			// Find the first memory op in the block, which should be a Phi.
			m := v
			for {
				m = m.MemoryArg()
				if m.Block != b {
					lv.f.Fatalf("can't find Phi before write barrier end mark %v", v)
				}
				if m.Op == ssa.OpPhi {
					break
				}
			}
			// Find the two predecessor blocks (write barrier on and off).
			if len(m.Args) != 2 {
				lv.f.Fatalf("phi before write barrier end mark has %d args, want 2", len(m.Args))
			}
			c := b.Preds[0].Block()
			d := b.Preds[1].Block()

			// Find their common predecessor block (the one that branches
			// based on wb on/off). It might be a diamond pattern, or one of
			// the blocks in the diamond pattern might be missing.
			var decisionBlock *ssa.Block
			if len(c.Preds) == 1 && c.Preds[0].Block() == d {
				decisionBlock = d
			} else if len(d.Preds) == 1 && d.Preds[0].Block() == c {
				decisionBlock = c
			} else if len(c.Preds) == 1 && len(d.Preds) == 1 && c.Preds[0].Block() == d.Preds[0].Block() {
				decisionBlock = c.Preds[0].Block()
			} else {
				lv.f.Fatalf("can't find write barrier pattern %v", v)
			}
			if len(decisionBlock.Succs) != 2 {
				lv.f.Fatalf("common predecessor block the wrong type %s", decisionBlock.Kind)
			}

			// Flow backwards from the control value to find the
			// flag load. We don't know what lowered ops we're
			// looking for, but all current arches produce a
			// single op that does the memory load from the flag
			// address, so we look for that.
			var load *ssa.Value
			v := decisionBlock.Controls[0]
			for {
				if v.MemoryArg() != nil {
					// Single instruction to load (and maybe compare) the write barrier flag.
					if sym, ok := v.Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					// Some architectures have to materialize the address
					// separately from the load.
					if sym, ok := v.Args[0].Aux.(*obj.LSym); ok && sym == ir.Syms.WriteBarrier {
						load = v
						break
					}
					v.Fatalf("load of write barrier flag not from correct global: %s", v.LongString())
				}
				// Common case: just flow backwards. (Some architectures
				// lower a comparison to an op that repeats the same
				// argument, hence the two-arg case.)
				if len(v.Args) == 1 || len(v.Args) == 2 && v.Args[0] == v.Args[1] {
					v = v.Args[0]
					continue
				}
				v.Fatalf("write barrier control value has more than one argument: %s", v.LongString())
			}

			// Mark everything after the load unsafe.
			found := false
			for _, v := range decisionBlock.Values {
				if found {
					lv.unsafePoints.Set(int32(v.ID))
				}
				found = found || v == load
			}
			lv.unsafeBlocks.Set(int32(decisionBlock.ID))

			// Mark the write barrier on/off blocks as unsafe.
			for _, e := range decisionBlock.Succs {
				x := e.Block()
				if x == b {
					continue
				}
				for _, v := range x.Values {
					lv.unsafePoints.Set(int32(v.ID))
				}
				lv.unsafeBlocks.Set(int32(x.ID))
			}

			// Mark the join block values up to the WBend as unsafe.
			for _, v := range b.Values {
				if v.Op == ssa.OpWBend {
					break
				}
				lv.unsafePoints.Set(int32(v.ID))
			}
		}
	}
}

// hasStackMap reports whether value v needs a stack map.
//
// This does not necessarily mean the instruction is a safe-point. In
// particular, call Values can have a stack map in case the callee
// grows the stack, but not themselves be a safe-point.
func (lv *liveness) hasStackMap(v *ssa.Value) bool {
	if !v.Op.IsCall() {
		return false
	}
	// Calls to the bulk write-barrier helpers wbZero and wbMove are
	// treated like the write barrier itself and do not need stack maps.
	if sym, ok := v.Aux.(*ssa.AuxCall); ok && (sym.Fn == ir.Syms.WBZero || sym.Fn == ir.Syms.WBMove) {
		return false
	}
	return true
}

// prologue visits all instructions in each basic block and summarizes the
// per-block liveness effects (uevar and varkill) that seed the dataflow
// solver.
func (lv *liveness) prologue() {
	lv.initcache()

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk the block instructions backward and update the block
		// effects with each value's effects.
		for j := len(b.Values) - 1; j >= 0; j-- {
			pos, e := lv.valueEffects(b.Values[j])
			if e&varkill != 0 {
				be.varkill.Set(pos)
				be.uevar.Unset(pos)
			}
			if e&uevar != 0 {
				be.uevar.Set(pos)
			}
		}
	}
}

// solve solves the liveness dataflow equations.
func (lv *liveness) solve() {
	// These temporary bitvectors exist to avoid successive allocations and
	// frees within the loop.
	nvars := int32(len(lv.vars))
	newlivein := bitvec.New(nvars)
	newliveout := bitvec.New(nvars)

	// Walk blocks in postorder. This improves convergence.
	po := lv.f.Postorder()

	// Iterate through the blocks until no block's live sets change. A work
	// queue might converge slightly faster, but the number of iterations is
	// so low that it hardly seems worth the complexity.
	for change := true; change; {
		change = false
		for _, b := range po {
			be := lv.blockEffects(b)

			newliveout.Clear()
			switch b.Kind {
			case ssa.BlockRet:
				for _, pos := range lv.cache.retuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockRetJmp:
				for _, pos := range lv.cache.tailuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockExit:
				// panic exit; nothing to do
			default:
				// A variable is live on output from this block
				// if it is live on input to some successor.
				//
				// out[b] = \bigcup_{s \in succ[b]} in[s]
				newliveout.Copy(lv.blockEffects(b.Succs[0].Block()).livein)
				for _, succ := range b.Succs[1:] {
					newliveout.Or(newliveout, lv.blockEffects(succ.Block()).livein)
				}
			}

			if !be.liveout.Eq(newliveout) {
				change = true
				be.liveout.Copy(newliveout)
			}

			// A variable is live on input to this block
			// if it is used by this block, or live on output from this block and
			// not set by the code in this block.
			//
			// in[b] = uevar[b] \cup (out[b] \setminus varkill[b])
			newlivein.AndNot(be.liveout, be.varkill)
			be.livein.Or(newlivein, be.uevar)
		}
	}
}
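
// For illustration (a hypothetical CFG, not from this source): take blocks
// b1 -> b2, where b1 reads x and then assigns y (uevar[b1] = {x},
// varkill[b1] = {y}) and b2 has y live into it (livein[b2] = {y}). Then
//
//	liveout[b1] = livein[b2]                              = {y}
//	livein[b1]  = uevar[b1] ∪ (liveout[b1] ∖ varkill[b1]) = {x}
//
// and a second pass over the blocks reproduces the same sets, so change
// remains false and the loop terminates.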

// epilogue visits all instructions in each basic block and computes a bit
// vector of live variables at each safe point.
func (lv *liveness) epilogue() {
	nvars := int32(len(lv.vars))
	liveout := bitvec.New(nvars)
	livedefer := bitvec.New(nvars) // always-live variables

	// If the function has any deferred calls, a panic/recover can return
	// through deferreturn without executing the rest of the body, so
	// stack-resident results and open-coded defer slots must stay live
	// (and zeroed) across every safe point.
	if lv.fn.HasDefer() {
		for i, n := range lv.vars {
			if n.Class == ir.PPARAMOUT {
				if n.IsOutputParamHeapAddr() {
					// Just to be paranoid. Heap addresses are PAUTOs.
					base.Fatalf("variable %v both output param and heap output param", n)
				}
				if n.Heapaddr != nil {
					// If this variable moved to the heap, then
					// its stack copy is not live.
					continue
				}
				// Note: zeroing is handled by zeroResults in walk.go.
				livedefer.Set(int32(i))
			}
			if n.IsOutputParamHeapAddr() {
				// This variable will be overwritten early in the function
				// prologue (from the result of a mallocgc) but we need to
				// zero it in case that malloc causes a stack scan.
				n.SetNeedzero(true)
				livedefer.Set(int32(i))
			}
			if n.OpenDeferSlot() {
				// Open-coded defer args slots must be live all the time, in case a
				// panic occurs anywhere in the function, leading to a deferreturn
				// that uses them.
				livedefer.Set(int32(i))
				if !n.Needzero() {
					base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
				}
			}
		}
	}

	// We must analyze the entry block first. The runtime assumes
	// the function entry map is index 0. Conveniently, layout
	// already ensured that the entry block is first.
	if lv.f.Entry != lv.f.Blocks[0] {
		lv.f.Fatalf("entry block must be first")
	}

	{
		// Reserve an entry for function entry.
		live := bitvec.New(nvars)
		lv.livevars = append(lv.livevars, live)
	}

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk forward through the basic block instructions and
		// allocate liveness maps for those instructions that need them.
		for _, v := range b.Values {
			if !lv.hasStackMap(v) {
				continue
			}

			live := bitvec.New(nvars)
			lv.livevars = append(lv.livevars, live)
		}

		// walk backward, construct maps at each safe point
		index := int32(len(lv.livevars) - 1)

		liveout.Copy(be.liveout)
		for i := len(b.Values) - 1; i >= 0; i-- {
			v := b.Values[i]

			if lv.hasStackMap(v) {
				// Found an interesting instruction, record the
				// corresponding liveness information.
				live := &lv.livevars[index]
				live.Or(*live, liveout)
				live.Or(*live, livedefer)
				index--
			}

			// Update liveness information.
			pos, e := lv.valueEffects(v)
			if e&varkill != 0 {
				liveout.Unset(pos)
			}
			if e&uevar != 0 {
				liveout.Set(pos)
			}
		}

		if b == lv.f.Entry {
			if index != 0 {
				base.Fatalf("bad index for entry point: %v", index)
			}

			// Check to make sure only input variables are live.
			for i, n := range lv.vars {
				if !liveout.Get(int32(i)) {
					continue
				}
				if n.Class == ir.PPARAM {
					continue // ok
				}
				base.FatalfAt(n.Pos(), "bad live variable at entry of %v: %L", lv.fn.Nname, n)
			}

			// Record live variables.
			live := &lv.livevars[index]
			live.Or(*live, liveout)
		}

		if lv.doClobber {
			lv.clobber(b)
		}

		// The liveness maps for this block are now complete. Compact them.
		lv.compact(b)
	}

	// If open-coded defers are allowed, make a liveness map for the
	// deferreturn point; otherwise mark it "don't care".
	if lv.fn.OpenCodedDeferDisallowed() {
		lv.livenessMap.DeferReturn = objw.StackMapDontCare
	} else {
		idx, _ := lv.stackMapSet.add(livedefer)
		lv.livenessMap.DeferReturn = objw.StackMapIndex(idx)
	}

	// Done compacting. Throw out the stack map set.
	lv.stackMaps = lv.stackMapSet.extractUnique()
	lv.stackMapSet = bvecSet{}

	// Useful sanity check: on entry to the function,
	// the only things that can possibly be live are the
	// input parameters.
	for j, n := range lv.vars {
		if n.Class != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
			lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
		}
	}
}

// compact coalesces identical bitmaps from lv.livevars into the set
// lv.stackMapSet and records the resulting stack map index for each safe
// point in b. Compact clears lv.livevars.
//
// There are actually two lists of bitmaps, one list for the local variables
// and one list for the function arguments. Both lists are indexed by the
// same PCDATA index, so the corresponding pairs must be considered together
// when merging duplicates. The argument bitmaps change much less often
// during function execution than the local variable bitmaps, so the two
// could in principle use separate PCDATA indexes and diverge; sharing one
// index keeps the encoding simple.
func (lv *liveness) compact(b *ssa.Block) {
	pos := 0
	if b == lv.f.Entry {
		// Handle entry stack map.
		lv.stackMapSet.add(lv.livevars[0])
		pos++
	}
	for _, v := range b.Values {
		if lv.hasStackMap(v) {
			idx, _ := lv.stackMapSet.add(lv.livevars[pos])
			pos++
			lv.livenessMap.set(v, objw.StackMapIndex(idx))
		}
		if lv.allUnsafe || v.Op != ssa.OpClobber && lv.unsafePoints.Get(int32(v.ID)) {
			lv.livenessMap.setUnsafeVal(v)
		}
	}
	if lv.allUnsafe || lv.unsafeBlocks.Get(int32(b.ID)) {
		lv.livenessMap.setUnsafeBlock(b)
	}

	// Reset livevars.
	lv.livevars = lv.livevars[:0]
}

func (lv *liveness) enableClobber() {
	// The clobberdead experiment inserts code to clobber pointer slots in all
	// the dead variables (locals and args) at every synchronous safepoint.
	if !base.Flag.ClobberDead {
		return
	}
	if lv.fn.Pragma&ir.CgoUnsafeArgs != 0 {
		// C or assembly code uses the exact frame layout. Don't clobber.
		return
	}
	if len(lv.vars) > 10000 || len(lv.f.Blocks) > 10000 {
		// Be careful to avoid doing too much work.
		// Bail if >10000 variables or >10000 blocks.
		// Otherwise, giant functions make this experiment generate too much code.
		return
	}
	if lv.f.Name == "forkAndExecInChild" {
		// forkAndExecInChild calls vfork on some platforms.
		// The code we add here clobbers parts of the stack in the child.
		// When the parent resumes, it is using the same stack frame. But the
		// child has clobbered stack variables that the parent needs. Boom!
		// In particular, the sys argument gets clobbered.
		return
	}
	if lv.f.Name == "wbBufFlush" ||
		((lv.f.Name == "callReflect" || lv.f.Name == "callMethod") && lv.fn.ABIWrapper()) {
		// runtime.wbBufFlush must not modify its arguments. See the comments
		// in runtime/mwbbuf.go:wbBufFlush.
		//
		// reflect.callReflect and reflect.callMethod are called from special
		// functions makeFuncStub and methodValueCall. The runtime expects
		// that it can find the first argument (ctxt) at 0(SP) in makeFuncStub
		// and methodValueCall's frame (see runtime/traceback.go:getArgInfo).
		// Normally callReflect and callMethod already do not modify the
		// argument, and keep it alive. But the compiler-generated ABI wrappers
		// don't do that. Special case the wrappers to not clobber their
		// arguments.
		lv.noClobberArgs = true
	}
	if h := os.Getenv("GOCLOBBERDEADHASH"); h != "" {
		// Clobber only functions where the hash of the function name matches a pattern.
		// Useful for binary searching for a miscompiled function.
		hstr := ""
		for _, b := range notsha256.Sum256([]byte(lv.f.Name)) {
			hstr += fmt.Sprintf("%08b", b)
		}
		if !strings.HasSuffix(hstr, h) {
			return
		}
		fmt.Printf("\t\t\tCLOBBERDEAD %s\n", lv.f.Name)
	}
	lv.doClobber = true
}

// clobber inserts code to clobber pointer slots in all the dead variables
// (locals and args) at every synchronous safepoint in b.
func (lv *liveness) clobber(b *ssa.Block) {
	// Copy the block's values to a temporary.
	oldSched := append([]*ssa.Value{}, b.Values...)
	b.Values = b.Values[:0]
	idx := 0

	// Clobber pointer slots in all dead variables at entry.
	if b == lv.f.Entry {
		for len(oldSched) > 0 && len(oldSched[0].Args) == 0 {
			// Skip argless ops. We need to skip at least
			// the lowered ClosurePtr op, because it
			// really wants to be first. This will also
			// skip ops like InitMem and SP, which are ok.
			b.Values = append(b.Values, oldSched[0])
			oldSched = oldSched[1:]
		}
		clobber(lv, b, lv.livevars[0])
		idx++
	}

	// Copy values into the schedule, adding clobbering around safepoints.
	for _, v := range oldSched {
		if !lv.hasStackMap(v) {
			b.Values = append(b.Values, v)
			continue
		}
		clobber(lv, b, lv.livevars[idx])
		b.Values = append(b.Values, v)
		idx++
	}
}

// clobber generates code to clobber pointer slots in all dead variables
// (those not marked in live). Clobbering instructions are added to the end
// of b.Values.
func clobber(lv *liveness, b *ssa.Block, live bitvec.BitVec) {
	for i, n := range lv.vars {
		if !live.Get(int32(i)) && !n.Addrtaken() && !n.OpenDeferSlot() && !n.IsOutputParamHeapAddr() {
			// Don't clobber stack objects (address-taken). They are
			// tracked dynamically.
			// Also don't clobber slots that are live for defers (see
			// the code setting livedefer in epilogue).
			if lv.noClobberArgs && n.Class == ir.PPARAM {
				continue
			}
			clobberVar(b, n)
		}
	}
}

// clobberVar generates code to trash the pointers in v.
// Clobbering instructions are added to the end of b.Values.
func clobberVar(b *ssa.Block, v *ir.Name) {
	clobberWalk(b, v, 0, v.Type())
}

// clobberWalk recursively clobbers the pointers in one portion of a variable.
//
//	b = block to which we append instructions
//	v = variable
//	offset = offset of (sub-portion of) variable to clobber (in bytes)
//	t = type of sub-portion of v
func clobberWalk(b *ssa.Block, v *ir.Name, offset int64, t *types.Type) {
	if !t.HasPointers() {
		return
	}
	switch t.Kind() {
	case types.TPTR,
		types.TUNSAFEPTR,
		types.TFUNC,
		types.TCHAN,
		types.TMAP:
		clobberPtr(b, v, offset)

	case types.TSTRING:
		// struct { byte *str; int len }
		clobberPtr(b, v, offset)

	case types.TINTER:
		// struct { Itab *tab; void *data }
		// or, when isnilinter(t)==true:
		// struct { Type *type; void *data }
		clobberPtr(b, v, offset)
		clobberPtr(b, v, offset+int64(types.PtrSize))

	case types.TSLICE:
		// struct { byte *array; int len; int cap }
		clobberPtr(b, v, offset)

	case types.TARRAY:
		for i := int64(0); i < t.NumElem(); i++ {
			clobberWalk(b, v, offset+i*t.Elem().Size(), t.Elem())
		}

	case types.TSTRUCT:
		for _, t1 := range t.Fields() {
			clobberWalk(b, v, offset+t1.Offset, t1.Type)
		}

	default:
		base.Fatalf("clobberWalk: unexpected type, %v", t)
	}
}

// clobberPtr generates a clobber of the pointer at offset offset in v.
// The clobber instruction is added at the end of b.
func clobberPtr(b *ssa.Block, v *ir.Name, offset int64) {
	b.NewValue0IA(src.NoXPos, ssa.OpClobber, types.TypeVoid, offset, v)
}
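
// For illustration (a hypothetical variable, not from this source):
// clobbering a dead
//
//	var s string // struct { byte *str; int len }
//
// walks its type and emits a single OpClobber for the str field at offset 0;
// the non-pointer len word is left untouched. A dead interface value would
// get two OpClobbers, one for each of its two words.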

// showlive reports the live variables at a program point, for -live output.
func (lv *liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
	if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
		return
	}
	if lv.fn.Wrapper() || lv.fn.Dupok() {
		// Skip reporting liveness information for compiler-generated wrappers.
		return
	}
	if !(v == nil || v.Op.IsCall()) {
		// Historically we only printed this information at
		// calls. Keep doing so.
		return
	}
	if live.IsEmpty() {
		return
	}

	pos := lv.fn.Nname.Pos()
	if v != nil {
		pos = v.Pos
	}

	s := "live at "
	if v == nil {
		s += fmt.Sprintf("entry to %s:", ir.FuncName(lv.fn))
	} else if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
		fn := sym.Fn.Name
		if pos := strings.Index(fn, "."); pos >= 0 {
			fn = fn[pos+1:]
		}
		s += fmt.Sprintf("call to %s:", fn)
	} else {
		s += "indirect call:"
	}

	// Sort variable names for display, so the output is deterministic.
	var names []string
	for j, n := range lv.vars {
		if live.Get(int32(j)) {
			names = append(names, n.Sym().Name)
		}
	}
	sort.Strings(names)
	for _, v := range names {
		s += " " + v
	}

	base.WarnfAt(pos, s)
}

// printbvec prints the variables in live as a comma-separated list,
// prefixed by name=. It returns whether anything has been printed so far.
func (lv *liveness) printbvec(printed bool, name string, live bitvec.BitVec) bool {
	if live.IsEmpty() {
		return printed
	}

	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=", name)

	comma := ""
	for i, n := range lv.vars {
		if !live.Get(int32(i)) {
			continue
		}
		fmt.Printf("%s%s", comma, n.Sym().Name)
		comma = ","
	}
	return true
}

// printeffect is like printbvec, but for a single variable effect.
func (lv *liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
	if !x {
		return printed
	}
	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=%s", name, lv.vars[pos].Sym().Name)

	return true
}

// printDebug prints the computed liveness information and inputs, for
// debugging. This format synthesizes the information used during the
// multiple passes into a single presentation.
func (lv *liveness) printDebug() {
	fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))

	for i, b := range lv.f.Blocks {
		if i > 0 {
			fmt.Printf("\n")
		}

		// bb#0 pred=1,2 succ=3
		fmt.Printf("bb#%d pred=", b.ID)
		for j, pred := range b.Preds {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", pred.Block().ID)
		}
		fmt.Printf(" succ=")
		for j, succ := range b.Succs {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", succ.Block().ID)
		}
		fmt.Printf("\n")

		be := lv.blockEffects(b)

		// initial settings
		printed := false
		printed = lv.printbvec(printed, "uevar", be.uevar)
		printed = lv.printbvec(printed, "livein", be.livein)
		if printed {
			fmt.Printf("\n")
		}

		// program listing, with individual effects listed

		if b == lv.f.Entry {
			live := lv.stackMaps[0]
			fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Nname.Pos()))
			fmt.Printf("\tlive=")
			printed = false
			for j, n := range lv.vars {
				if !live.Get(int32(j)) {
					continue
				}
				if printed {
					fmt.Printf(",")
				}
				fmt.Printf("%v", n)
				printed = true
			}
			fmt.Printf("\n")
		}

		for _, v := range b.Values {
			fmt.Printf("(%s) %v\n", base.FmtPos(v.Pos), v.LongString())

			pcdata := lv.livenessMap.Get(v)

			pos, effect := lv.valueEffects(v)
			printed = false
			printed = lv.printeffect(printed, "uevar", pos, effect&uevar != 0)
			printed = lv.printeffect(printed, "varkill", pos, effect&varkill != 0)
			if printed {
				fmt.Printf("\n")
			}

			if pcdata.StackMapValid() {
				fmt.Printf("\tlive=")
				printed = false
				live := lv.stackMaps[pcdata]
				for j, n := range lv.vars {
					if !live.Get(int32(j)) {
						continue
					}
					if printed {
						fmt.Printf(",")
					}
					fmt.Printf("%v", n)
					printed = true
				}
				fmt.Printf("\n")
			}

			if lv.livenessMap.GetUnsafe(v) {
				fmt.Printf("\tunsafe-point\n")
			}
		}
		if lv.livenessMap.GetUnsafeBlock(b) {
			fmt.Printf("\tunsafe-block\n")
		}

		// bb bitsets
		fmt.Printf("end\n")
		printed = false
		printed = lv.printbvec(printed, "varkill", be.varkill)
		printed = lv.printbvec(printed, "liveout", be.liveout)
		if printed {
			fmt.Printf("\n")
		}
	}

	fmt.Printf("\n")
}

// emit dumps the argument and locals bitmaps to symbols, each as a sequence
// of uint32 values. The first word written is the total number of bitmaps;
// the second word is the length of each bitmap. All bitmaps within a symbol
// have equal length. The remaining bytes are the raw bitmaps.
func (lv *liveness) emit() (argsSym, liveSym *obj.LSym) {
	// Size args bitmaps to be just large enough to hold the largest pointer.
	// First, find the largest frame-offset node we care about.
	// (Nodes without pointers aren't in lv.vars; see shouldTrack.)
	var maxArgNode *ir.Name
	for _, n := range lv.vars {
		switch n.Class {
		case ir.PPARAM, ir.PPARAMOUT:
			if !n.IsOutputParamInRegisters() {
				if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
					maxArgNode = n
				}
			}
		}
	}
	// Next, find the offset of the largest pointer in the largest node.
	var maxArgs int64
	if maxArgNode != nil {
		maxArgs = maxArgNode.FrameOffset() + types.PtrDataSize(maxArgNode.Type())
	}

	// Size locals bitmaps to be stkptrsize sized.
	// We cannot shrink them to only hold the largest pointer,
	// because their size is used to calculate the beginning
	// of the local variables frame.
	// Further discussion in https://golang.org/cl/104175.
	// TODO: consider trimming leading zeros.
	maxLocals := lv.stkptrsize

	// Temporary symbols for encoding bitmaps.
	var argsSymTmp, liveSymTmp obj.LSym

	args := bitvec.New(int32(maxArgs / int64(types.PtrSize)))
	aoff := objw.Uint32(&argsSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	aoff = objw.Uint32(&argsSymTmp, aoff, uint32(args.N))          // number of bits in each bitmap

	locals := bitvec.New(int32(maxLocals / int64(types.PtrSize)))
	loff := objw.Uint32(&liveSymTmp, 0, uint32(len(lv.stackMaps))) // number of bitmaps
	loff = objw.Uint32(&liveSymTmp, loff, uint32(locals.N))        // number of bits in each bitmap

	for _, live := range lv.stackMaps {
		args.Clear()
		locals.Clear()

		lv.pointerMap(live, lv.vars, args, locals)

		aoff = objw.BitVec(&argsSymTmp, aoff, args)
		loff = objw.BitVec(&liveSymTmp, loff, locals)
	}

	// Give these LSyms content-addressable names,
	// so that they can be de-duplicated.
	// This provides significant binary size savings.
	return base.Ctxt.GCLocalsSym(argsSymTmp.P), base.Ctxt.GCLocalsSym(liveSymTmp.P)
}

// Compute is the entry point for the liveness analysis. It solves for the
// liveness of pointer variables in the function and emits a runtime data
// structure read by the garbage collector.
// It returns a map from GC safe points to their corresponding stack map
// index, and a map that contains all input parameters that may be
// partially live.
func Compute(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs) (Map, map[*ir.Name]bool) {
	// Construct the global liveness state.
	vars, idx := getvariables(curfn)
	lv := newliveness(curfn, f, vars, idx, stkptrsize)

	// Run the dataflow framework.
	lv.prologue()
	lv.solve()
	lv.epilogue()
	if base.Flag.Live > 0 {
		lv.showlive(nil, lv.stackMaps[0])
		for _, b := range f.Blocks {
			for _, val := range b.Values {
				if idx := lv.livenessMap.Get(val); idx.StackMapValid() {
					lv.showlive(val, lv.stackMaps[idx])
				}
			}
		}
	}
	if base.Flag.Live >= 2 {
		lv.printDebug()
	}

	// Update the function cache.
	{
		cache := f.Cache.Liveness.(*livenessFuncCache)
		if cap(lv.be) < 2000 { // avoid caching large slices
			for i := range lv.be {
				lv.be[i] = blockEffects{}
			}
			cache.be = lv.be
		}
		if len(lv.livenessMap.Vals) < 2000 {
			cache.livenessMap = lv.livenessMap
		}
	}

	// Emit the live pointer map data structures.
	ls := curfn.LSym
	fninfo := ls.Func()
	fninfo.GCArgs, fninfo.GCLocals = lv.emit()

	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_ArgsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCArgs

	p = pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(rtabi.FUNCDATA_LocalsPointerMaps)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = fninfo.GCLocals

	if x := lv.emitStackObjects(); x != nil {
		p := pp.Prog(obj.AFUNCDATA)
		p.From.SetConst(rtabi.FUNCDATA_StackObjects)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = x
	}

	return lv.livenessMap, lv.partLiveArgs
}

// emitStackObjects emits a symbol describing the frame's address-taken
// (stack object) variables, or returns nil if there are none.
func (lv *liveness) emitStackObjects() *obj.LSym {
	var vars []*ir.Name
	for _, n := range lv.fn.Dcl {
		if shouldTrack(n) && n.Addrtaken() && n.Esc() != ir.EscHeap {
			vars = append(vars, n)
		}
	}
	if len(vars) == 0 {
		return nil
	}

	// Sort variables from lowest to highest address.
	sort.Slice(vars, func(i, j int) bool { return vars[i].FrameOffset() < vars[j].FrameOffset() })

	// Populate the stack object data.
	// Format must match runtime/stack.go:stackObjectRecord.
	x := base.Ctxt.Lookup(lv.fn.LSym.Name + ".stkobj")
	x.Set(obj.AttrContentAddressable, true)
	lv.fn.LSym.Func().StackObjects = x
	off := 0
	off = objw.Uintptr(x, off, uint64(len(vars)))
	for _, v := range vars {
		// Note: arguments and return values have non-negative frame
		// offsets, in which case the offset is relative to argp.
		// Locals have a negative frame offset, in which case the offset
		// is relative to varp. We already limit the frame size, so the
		// offset and the object size should not be too big.
		frameOffset := v.FrameOffset()
		if frameOffset != int64(int32(frameOffset)) {
			base.Fatalf("frame offset too big: %v %d", v, frameOffset)
		}
		off = objw.Uint32(x, off, uint32(frameOffset))

		t := v.Type()
		sz := t.Size()
		if sz != int64(int32(sz)) {
			base.Fatalf("stack object too big: %v of type %v, size %d", v, t, sz)
		}
		lsym, useGCProg, ptrdata := reflectdata.GCSym(t)
		if useGCProg {
			ptrdata = -ptrdata
		}
		off = objw.Uint32(x, off, uint32(sz))
		off = objw.Uint32(x, off, uint32(ptrdata))
		off = objw.SymPtrOff(x, off, lsym)
	}

	if base.Flag.Live != 0 {
		for _, v := range vars {
			base.WarnfAt(v.Pos(), "stack object %v %v", v, v.Type())
		}
	}

	return x
}

// isfat reports whether a variable of type t needs multiple assignments to
// initialize. For example:
//
//	type T struct { x, y int }
//	x := T{x: 0, y: 1}
//
// Then we need:
//
//	var t T
//	t.x = 0
//	t.y = 1
//
// to fully initialize t.
func isfat(t *types.Type) bool {
	if t != nil {
		switch t.Kind() {
		case types.TSLICE, types.TSTRING,
			types.TINTER: // maybe remove later
			return true
		case types.TARRAY:
			// Array of 1 element, check if element is fat
			if t.NumElem() == 1 {
				return isfat(t.Elem())
			}
			return true
		case types.TSTRUCT:
			// Struct with 1 field, check if field is fat
			if t.NumFields() == 1 {
				return isfat(t.Field(0).Type)
			}
			return true
		}
	}

	return false
}

// WriteFuncMap writes the pointer bitmaps for bodyless function fn's
// inputs and outputs as the value of symbol <fn>.args_stackmap.
// If fn has outputs, two bitmaps are written, otherwise one.
func WriteFuncMap(fn *ir.Func, abiInfo *abi.ABIParamResultInfo) {
	if ir.FuncName(fn) == "_" || fn.Sym().Linkname != "" {
		return
	}
	nptr := int(abiInfo.ArgWidth() / int64(types.PtrSize))
	bv := bitvec.New(int32(nptr))

	for _, p := range abiInfo.InParams() {
		typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
	}

	nbitmap := 1
	if fn.Type().NumResults() > 0 {
		nbitmap = 2
	}
	lsym := base.Ctxt.Lookup(fn.LSym.Name + ".args_stackmap")
	off := objw.Uint32(lsym, 0, uint32(nbitmap))
	off = objw.Uint32(lsym, off, uint32(bv.N))
	off = objw.BitVec(lsym, off, bv)

	if fn.Type().NumResults() > 0 {
		for _, p := range abiInfo.OutParams() {
			if len(p.Registers) == 0 {
				typebits.SetNoCheck(p.Type, p.FrameOffset(abiInfo), bv)
			}
		}
		off = objw.BitVec(lsym, off, bv)
	}

	objw.Global(lsym, int32(off), obj.RODATA|obj.LOCAL)
}
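
// For illustration (a hypothetical declaration, not from this source):
//
//	func h(p *byte, n int) *int
//
// declared without a body and compiled with stack-assigned arguments would
// get an h.args_stackmap holding two bitmaps over three pointer-sized words
// (p, n, and the result). The input bitmap sets only p's bit; the output
// bitmap is written on top of the same bit vector, so it carries both p's
// bit and the result word's bit.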