1
2
3
4
5 package ssa
6
7 import (
8 "cmd/compile/internal/base"
9 "cmd/compile/internal/logopt"
10 "cmd/compile/internal/reflectdata"
11 "cmd/compile/internal/types"
12 "cmd/internal/obj"
13 "cmd/internal/obj/s390x"
14 "cmd/internal/objabi"
15 "cmd/internal/src"
16 "encoding/binary"
17 "fmt"
18 "internal/buildcfg"
19 "io"
20 "math"
21 "math/bits"
22 "os"
23 "path/filepath"
24 "strings"
25 )
26
// deadValueChoice tells applyRewrite whether it may eliminate values
// that become unused while rewriting.
type deadValueChoice bool

const (
	leaveDeadValues deadValueChoice = false
	// Explicitly typed: without the type, this constant would be an
	// untyped bool rather than a deadValueChoice like its sibling.
	removeDeadValues deadValueChoice = true
)
33
34
// applyRewrite repeatedly applies the block rewriter rb and the value
// rewriter rv to f until neither reports a change (a fixed point).
// deadcode selects whether values that become unused are reset to
// OpInvalid on the fly. After the fixed point, invalidated values are
// compacted out of each block and pending statement-boundary positions
// are re-homed.
func applyRewrite(f *Func, rb blockRewriter, rv valueRewriter, deadcode deadValueChoice) {
	// pendingLines holds statement-boundary positions that lost their
	// value during rewriting and still need a new home.
	pendingLines := f.cachedLineStarts
	pendingLines.clear()
	debug := f.pass.debug
	if debug > 1 {
		fmt.Printf("%s: rewriting for %s\n", f.pass.name, f.Name)
	}
	var iters int
	var states map[string]bool // rewriteHash -> seen, for cycle detection (lazily allocated)
	for {
		change := false
		deadChange := false
		for _, b := range f.Blocks {
			var b0 *Block
			if debug > 1 {
				// Snapshot the block so the debug output can show before/after.
				b0 = new(Block)
				*b0 = *b
				b0.Succs = append([]Edge{}, b.Succs...) // copy, don't alias
			}
			// Strip copies feeding the block's control values.
			for i, c := range b.ControlValues() {
				for c.Op == OpCopy {
					c = c.Args[0]
					b.ReplaceControl(i, c)
				}
			}
			if rb(b) {
				change = true
				if debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", b0.LongString(), b.LongString())
				}
			}
			for j, v := range b.Values {
				var v0 *Value
				if debug > 1 {
					v0 = new(Value)
					*v0 = *v
					v0.Args = append([]*Value{}, v.Args...) // copy, don't alias
				}
				if v.Uses == 0 && v.removeable() {
					if v.Op != OpInvalid && deadcode == removeDeadValues {
						// Reset any values that are now unused, so that we decrement
						// the use count of all of their arguments. This is not a full
						// deadcode pass (it does not handle cycles), but it helps
						// Uses==1 rule conditions fire.
						v.reset(OpInvalid)
						deadChange = true
					}
					// No point rewriting values which aren't used.
					continue
				}

				vchange := phielimValue(v)
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}

				// Eliminate copy inputs. If any copy input becomes unused,
				// mark it invalid and discard its argument, repeating
				// recursively on the discarded argument. This removes
				// phantom "dead copy" uses of a value so that x.Uses==1
				// rule conditions fire reliably.
				for i, a := range v.Args {
					if a.Op != OpCopy {
						continue
					}
					aa := copySource(a)
					v.SetArg(i, aa)
					// If a, a copy, carried a statement boundary, try to find a
					// new value to hold it: first choice is aa (earlier in the
					// dataflow), second choice is v, otherwise record the position
					// in pendingLines and look for a home after rewriting is done.
					if a.Pos.IsStmt() == src.PosIsStmt {
						if aa.Block == a.Block && aa.Pos.Line() == a.Pos.Line() && aa.Pos.IsStmt() != src.PosNotStmt {
							aa.Pos = aa.Pos.WithIsStmt()
						} else if v.Block == a.Block && v.Pos.Line() == a.Pos.Line() && v.Pos.IsStmt() != src.PosNotStmt {
							v.Pos = v.Pos.WithIsStmt()
						} else {
							// Record the lost line and look for a new home after
							// all rewrites are complete. Only one block is stored
							// per position, so a boundary may be lost if the same
							// line appears in more than one block.
							pendingLines.set(a.Pos, int32(a.Block.ID))
						}
						a.Pos = a.Pos.WithNotStmt()
					}
					vchange = true
					// Free the whole chain of copies that just became unused.
					for a.Uses == 0 {
						b := a.Args[0]
						a.reset(OpInvalid)
						a = b
					}
				}
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}

				// Apply the value rewrite function.
				if rv(v) {
					vchange = true
					// If the rewritten value is now a poor choice for a statement
					// boundary, move the boundary to a better value in the block.
					if v.Pos.IsStmt() == src.PosIsStmt {
						if k := nextGoodStatementIndex(v, j, b); k != j {
							v.Pos = v.Pos.WithNotStmt()
							b.Values[k].Pos = b.Values[k].Pos.WithIsStmt()
						}
					}
				}

				change = change || vchange
				if vchange && debug > 1 {
					fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
				}
			}
		}
		if !change && !deadChange {
			break
		}
		iters++
		if (iters > 1000 || debug >= 2) && change {
			// We've done a suspiciously large number of rewrites (or we're in
			// debug mode). Start checking whether we're in a rewrite cycle by
			// hashing the function state each iteration. This is too expensive
			// to do routinely. Note: deadChange-only iterations are excluded.
			if states == nil {
				states = make(map[string]bool)
			}
			h := f.rewriteHash()
			if _, ok := states[h]; ok {
				// We've found a cycle. To diagnose it, raise debug to 2 and go
				// around again so every applied rule is printed; if debug is
				// already >= 2 we have done that, so crash with a report.
				if debug < 2 {
					debug = 2
					states = make(map[string]bool)
				} else {
					f.Fatalf("rewrite cycle detected")
				}
			}
			states[h] = true
		}
	}
	// Remove clobbered (OpInvalid) values, compacting each block's value
	// slice in place, and give pending statement boundaries a new home.
	for _, b := range f.Blocks {
		j := 0
		for i, v := range b.Values {
			vl := v.Pos
			if v.Op == OpInvalid {
				if v.Pos.IsStmt() == src.PosIsStmt {
					pendingLines.set(vl, int32(b.ID))
				}
				f.freeValue(v)
				continue
			}
			if v.Pos.IsStmt() != src.PosNotStmt && !notStmtBoundary(v.Op) && pendingLines.get(vl) == int32(b.ID) {
				pendingLines.remove(vl)
				v.Pos = v.Pos.WithIsStmt()
			}
			if i != j {
				b.Values[j] = v
			}
			j++
		}
		// As a last resort, let the block itself carry the boundary.
		if pendingLines.get(b.Pos) == int32(b.ID) {
			b.Pos = b.Pos.WithIsStmt()
			pendingLines.remove(b.Pos)
		}
		b.truncateValues(j)
	}
}
210
211
212
213 func is64BitFloat(t *types.Type) bool {
214 return t.Size() == 8 && t.IsFloat()
215 }
216
217 func is32BitFloat(t *types.Type) bool {
218 return t.Size() == 4 && t.IsFloat()
219 }
220
221 func is64BitInt(t *types.Type) bool {
222 return t.Size() == 8 && t.IsInteger()
223 }
224
225 func is32BitInt(t *types.Type) bool {
226 return t.Size() == 4 && t.IsInteger()
227 }
228
229 func is16BitInt(t *types.Type) bool {
230 return t.Size() == 2 && t.IsInteger()
231 }
232
233 func is8BitInt(t *types.Type) bool {
234 return t.Size() == 1 && t.IsInteger()
235 }
236
237 func isPtr(t *types.Type) bool {
238 return t.IsPtrShaped()
239 }
240
241
242
243 func mergeSym(x, y Sym) Sym {
244 if x == nil {
245 return y
246 }
247 if y == nil {
248 return x
249 }
250 panic(fmt.Sprintf("mergeSym with two non-nil syms %v %v", x, y))
251 }
252
253 func canMergeSym(x, y Sym) bool {
254 return x == nil || y == nil
255 }
256
257
258
259
260
// canMergeLoadClobber reports whether the load can be merged into target
// in the case where the merged instruction would clobber (overwrite) the
// register holding x, one of target's other arguments.
func canMergeLoadClobber(target, load, x *Value) bool {
	// The register containing x is going to get clobbered, so we can
	// only merge if x has no other uses that still need its value.
	if x.Uses != 1 {
		return false
	}
	// Don't merge when target sits in a deeper loop than x: the merged
	// instruction would recompute x's value on every iteration.
	// NOTE(review): relies on loopnest depth info being valid after
	// calculateDepths — confirm this invariant at the pass boundary.
	loopnest := x.Block.Func.loopnest()
	loopnest.calculateDepths()
	if loopnest.depth(target.Block.ID) > loopnest.depth(x.Block.ID) {
		return false
	}
	return canMergeLoad(target, load)
}
278
279
280
// canMergeLoad reports whether the load can be merged into target without
// invalidating the schedule.
func canMergeLoad(target, load *Value) bool {
	if target.Block.ID != load.Block.ID {
		// If the load is in a different block do not merge it.
		return false
	}

	// We can't merge the load into the target if the load
	// has more than one use.
	if load.Uses != 1 {
		return false
	}

	mem := load.MemoryArg()

	// We need the load's memory arg to still be alive at target. That
	// can't be the case if one of target's args depends on a memory
	// state that is a successor of load's memory arg.
	//
	// For example, it would be invalid to merge load into target in
	// the following situation because newmem has killed oldmem
	// before target is reached:
	//     load = read ... oldmem
	//   newmem = write ... oldmem
	//     arg0 = read ... newmem
	//   target = add arg0 load
	//
	// Start the search from target's non-load arguments. An argument from
	// a different block necessarily dominates load (which shares target's
	// block), so it can be excluded immediately.
	var args []*Value
	for _, a := range target.Args {
		if a != load && a.Block.ID == target.Block.ID {
			args = append(args, a)
		}
	}

	// memPreds contains memory states known to be predecessors of load's
	// memory state. It is lazily initialized.
	var memPreds map[*Value]bool
	for i := 0; len(args) > 0; i++ {
		const limit = 100
		if i >= limit {
			// Give up if we have done a lot of iterations.
			return false
		}
		v := args[len(args)-1]
		args = args[:len(args)-1]
		if target.Block.ID != v.Block.ID {
			// Since target and load are in the same block
			// we can stop searching when we leave the block.
			continue
		}
		if v.Op == OpPhi {
			// A Phi implies we have reached the top of the block.
			continue
		}
		if v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
			// We could handle this situation however it is likely
			// to be very rare.
			return false
		}
		if v.Op.SymEffect()&SymAddr != 0 {
			// This case prevents an operation that calculates the
			// address of a variable from being forced to schedule
			// before its corresponding VarDef:
			//   v1 = LOAD ...
			//   v2 = VARDEF
			//   v3 = LEAQ
			//   v4 = CMPQ v1 v3
			// Combining the CMPQ with the load would force the CMPQ
			// (and hence the LEAQ) to schedule before the VARDEF.
			return false
		}
		if v.Type.IsMemory() {
			if memPreds == nil {
				// Lazily initialize the set of memory states known
				// to be predecessors of load's memory state, walking
				// the memory chain backwards from mem.
				memPreds = make(map[*Value]bool)
				m := mem
				const limit = 50
				for i := 0; i < limit; i++ {
					if m.Op == OpPhi {
						// The memory phi, if it exists, is always the
						// first logical store in the block.
						break
					}
					if m.Block.ID != target.Block.ID {
						break
					}
					if !m.Type.IsMemory() {
						break
					}
					memPreds[m] = true
					if len(m.Args) == 0 {
						break
					}
					m = m.MemoryArg()
				}
			}

			// We can merge if v is a predecessor of mem.
			//
			// For example, we can merge load into target in the
			// following scenario:
			//      x = read ... v
			//    mem = write ... v
			//   load = read ... mem
			// target = add x load
			if memPreds[v] {
				continue
			}
			return false
		}
		if len(v.Args) > 0 && v.Args[len(v.Args)-1] == mem {
			// If v takes mem as an input then we know mem
			// is valid at this point.
			continue
		}
		for _, a := range v.Args {
			if target.Block.ID == a.Block.ID {
				args = append(args, a)
			}
		}
	}

	return true
}
413
414
// isSameCall reports whether sym (an *AuxCall) targets the function with
// the given fully-qualified name.
func isSameCall(sym interface{}, name string) bool {
	fn := sym.(*AuxCall).Fn
	return fn != nil && fn.String() == name
}

// canLoadUnaligned reports whether the target architecture supports
// unaligned load operations (required alignment of 1 byte).
func canLoadUnaligned(c *Config) bool {
	return c.ctxt.Arch.Alignment == 1
}
424
425
// nlzX returns the number of leading zero bits in x, treating the value
// as unsigned.
func nlz64(x int64) int {
	return bits.LeadingZeros64(uint64(x))
}
func nlz32(x int32) int {
	return bits.LeadingZeros32(uint32(x))
}
func nlz16(x int16) int {
	return bits.LeadingZeros16(uint16(x))
}
func nlz8(x int8) int {
	return bits.LeadingZeros8(uint8(x))
}

// ntzX returns the number of trailing zero bits in x, treating the value
// as unsigned.
func ntz64(x int64) int {
	return bits.TrailingZeros64(uint64(x))
}
func ntz32(x int32) int {
	return bits.TrailingZeros32(uint32(x))
}
func ntz16(x int16) int {
	return bits.TrailingZeros16(uint16(x))
}
func ntz8(x int8) int {
	return bits.TrailingZeros8(uint8(x))
}

// oneBit (and its sized variants) report whether exactly one bit of x is set.
func oneBit(x int64) bool {
	return x != 0 && x&(x-1) == 0
}
func oneBit8(x int8) bool {
	return x != 0 && x&(x-1) == 0
}
func oneBit16(x int16) bool {
	return x != 0 && x&(x-1) == 0
}
func oneBit32(x int32) bool {
	return x != 0 && x&(x-1) == 0
}
func oneBit64(x int64) bool {
	return x != 0 && x&(x-1) == 0
}

// nto returns the number of trailing one bits in x.
func nto(x int64) int64 {
	return int64(ntz64(^x))
}
447
448
449
// logX returns the position of the highest set bit of n (floor(log2(n))
// for positive n), treating n as unsigned; it returns -1 when no bit is set.
func log8(n int8) int64   { return int64(bits.Len8(uint8(n))) - 1 }
func log16(n int16) int64 { return int64(bits.Len16(uint16(n))) - 1 }
func log32(n int32) int64 { return int64(bits.Len32(uint32(n))) - 1 }
func log64(n int64) int64 { return int64(bits.Len64(uint64(n))) - 1 }

// log2uint32 returns the position of the highest set bit among the low
// 32 bits of n, or -1 if they are all clear.
func log2uint32(n int64) int64 {
	return int64(bits.Len32(uint32(n))) - 1
}
468
469
// isPowerOfTwoX reports whether n is a positive power of two.
func isPowerOfTwo8(n int8) bool   { return n > 0 && n&(n-1) == 0 }
func isPowerOfTwo16(n int16) bool { return n > 0 && n&(n-1) == 0 }
func isPowerOfTwo32(n int32) bool { return n > 0 && n&(n-1) == 0 }
func isPowerOfTwo64(n int64) bool { return n > 0 && n&(n-1) == 0 }

// isUint64PowerOfTwo reports whether in, reinterpreted as a uint64,
// is a power of two.
func isUint64PowerOfTwo(in int64) bool {
	n := uint64(in)
	return n != 0 && n&(n-1) == 0
}

// isUint32PowerOfTwo reports whether the low 32 bits of in, reinterpreted
// as a uint32, form a power of two.
func isUint32PowerOfTwo(in int64) bool {
	n := uint64(uint32(in))
	return n != 0 && n&(n-1) == 0
}
494
495
// is32Bit reports whether n can be represented as a signed 32-bit integer.
func is32Bit(n int64) bool { return int64(int32(n)) == n }

// is16Bit reports whether n can be represented as a signed 16-bit integer.
func is16Bit(n int64) bool { return int64(int16(n)) == n }

// is8Bit reports whether n can be represented as a signed 8-bit integer.
func is8Bit(n int64) bool { return int64(int8(n)) == n }

// isU8Bit reports whether n can be represented as an unsigned 8-bit integer.
func isU8Bit(n int64) bool { return int64(uint8(n)) == n }

// isU12Bit reports whether n can be represented as an unsigned 12-bit integer.
func isU12Bit(n int64) bool { return n >= 0 && n < 1<<12 }

// isU16Bit reports whether n can be represented as an unsigned 16-bit integer.
func isU16Bit(n int64) bool { return int64(uint16(n)) == n }

// isU32Bit reports whether n can be represented as an unsigned 32-bit integer.
func isU32Bit(n int64) bool { return int64(uint32(n)) == n }

// is20Bit reports whether n can be represented as a signed 20-bit integer.
func is20Bit(n int64) bool { return n >= -(1<<19) && n < 1<<19 }
534
535
// b2i translates a boolean to 0 or 1 for assigning to auxInt.
func b2i(b bool) int64 {
	var r int64
	if b {
		r = 1
	}
	return r
}

// b2i32 translates a boolean to 0 or 1.
func b2i32(b bool) int32 {
	var r int32
	if b {
		r = 1
	}
	return r
}
550
551
552
// shiftIsBounded reports whether shift value v is known to be in range
// for its operand width, so rewrite rules may drop the shift's bounds
// handling. The fact is recorded in v.AuxInt (nonzero means bounded).
func shiftIsBounded(v *Value) bool {
	return v.AuxInt != 0
}

// canonLessThan reports whether x orders before y in a deterministic,
// arbitrary total order used to canonicalize the operand order of
// commutative operations: by opcode, then source position, then ID.
func canonLessThan(x, y *Value) bool {
	if x.Op != y.Op {
		return x.Op < y.Op
	}
	if !x.Pos.SameFileAndLine(y.Pos) {
		return x.Pos.Before(y.Pos)
	}
	return x.ID < y.ID
}
568
569
570
// truncate64Fto32F converts a float64 to a float32, panicking if the
// conversion is not exact (so constant folding never silently changes a
// value). NaNs are converted by hand so their bit pattern survives.
func truncate64Fto32F(f float64) float32 {
	if !isExactFloat32(f) {
		panic("truncate64Fto32F: truncation is not exact")
	}
	if !math.IsNaN(f) {
		return float32(f)
	}
	// NaN bit patterns aren't necessarily preserved across conversion
	// instructions so convert the NaN manually: keep the sign, force the
	// float32 NaN exponent, and take the top 23 mantissa bits.
	b := math.Float64bits(f)
	m := b & ((1 << 52) - 1) // mantissa (a.k.a. significand)
	//          | sign                  | exponent   | mantissa       |
	r := uint32(((b >> 32) & (1 << 31)) | 0x7f800000 | (m >> (52 - 23)))
	return math.Float32frombits(r)
}
586
587
588
// extend32Fto64F converts a float32 to a float64. NaNs are widened by
// hand so the sign and mantissa payload bits are carried over, since a
// plain conversion need not preserve the NaN bit pattern.
func extend32Fto64F(f float32) float64 {
	if math.IsNaN(float64(f)) {
		// Rebuild the float64: sign bit, all-ones exponent, and the
		// 23 mantissa bits shifted into the top of the 52-bit field.
		b := uint64(math.Float32bits(f))
		r := ((b << 32) & (1 << 63)) | (0x7ff << 52) | ((b & 0x7fffff) << (52 - 23))
		return math.Float64frombits(r)
	}
	return float64(f)
}
600
601
// DivisionNeedsFixUp reports whether the division encoded in v still
// needs its fix-up code; the fact is recorded in v.AuxInt (zero means
// fix-up needed). NOTE(review): the exact AuxInt convention is set by
// the rules that create these values — confirm at the call sites.
func DivisionNeedsFixUp(v *Value) bool {
	return v.AuxInt == 0
}
605
606
// auxFrom64F encodes a float64 value so it can be stored in an AuxInt.
// NaNs are rejected because their payload would not round-trip reliably.
func auxFrom64F(f float64) int64 {
	if f != f { // f != f is true exactly for NaN
		panic("can't encode a NaN in AuxInt field")
	}
	return int64(math.Float64bits(f))
}

// auxFrom32F encodes a float32 value (stored as widened float64 bits) so
// it can be stored in an AuxInt.
func auxFrom32F(f float32) int64 {
	if f != f {
		panic("can't encode a NaN in AuxInt field")
	}
	return int64(math.Float64bits(extend32Fto64F(f)))
}

// auxTo32F decodes a float32 from the AuxInt encoding of auxFrom32F.
func auxTo32F(i int64) float32 {
	return truncate64Fto32F(math.Float64frombits(uint64(i)))
}

// auxTo64F decodes a float64 from the AuxInt encoding of auxFrom64F.
func auxTo64F(i int64) float64 {
	return math.Float64frombits(uint64(i))
}
631
632 func auxIntToBool(i int64) bool {
633 if i == 0 {
634 return false
635 }
636 return true
637 }
638 func auxIntToInt8(i int64) int8 {
639 return int8(i)
640 }
641 func auxIntToInt16(i int64) int16 {
642 return int16(i)
643 }
644 func auxIntToInt32(i int64) int32 {
645 return int32(i)
646 }
647 func auxIntToInt64(i int64) int64 {
648 return i
649 }
650 func auxIntToUint8(i int64) uint8 {
651 return uint8(i)
652 }
653 func auxIntToFloat32(i int64) float32 {
654 return float32(math.Float64frombits(uint64(i)))
655 }
656 func auxIntToFloat64(i int64) float64 {
657 return math.Float64frombits(uint64(i))
658 }
659 func auxIntToValAndOff(i int64) ValAndOff {
660 return ValAndOff(i)
661 }
662 func auxIntToArm64BitField(i int64) arm64BitField {
663 return arm64BitField(i)
664 }
665 func auxIntToInt128(x int64) int128 {
666 if x != 0 {
667 panic("nonzero int128 not allowed")
668 }
669 return 0
670 }
671 func auxIntToFlagConstant(x int64) flagConstant {
672 return flagConstant(x)
673 }
674
675 func auxIntToOp(cc int64) Op {
676 return Op(cc)
677 }
678
679 func boolToAuxInt(b bool) int64 {
680 if b {
681 return 1
682 }
683 return 0
684 }
685 func int8ToAuxInt(i int8) int64 {
686 return int64(i)
687 }
688 func int16ToAuxInt(i int16) int64 {
689 return int64(i)
690 }
691 func int32ToAuxInt(i int32) int64 {
692 return int64(i)
693 }
694 func int64ToAuxInt(i int64) int64 {
695 return int64(i)
696 }
697 func uint8ToAuxInt(i uint8) int64 {
698 return int64(int8(i))
699 }
700 func float32ToAuxInt(f float32) int64 {
701 return int64(math.Float64bits(float64(f)))
702 }
703 func float64ToAuxInt(f float64) int64 {
704 return int64(math.Float64bits(f))
705 }
706 func valAndOffToAuxInt(v ValAndOff) int64 {
707 return int64(v)
708 }
709 func arm64BitFieldToAuxInt(v arm64BitField) int64 {
710 return int64(v)
711 }
712 func int128ToAuxInt(x int128) int64 {
713 if x != 0 {
714 panic("nonzero int128 not allowed")
715 }
716 return 0
717 }
718 func flagConstantToAuxInt(x flagConstant) int64 {
719 return int64(x)
720 }
721
722 func opToAuxInt(o Op) int64 {
723 return int64(o)
724 }
725
726
// Aux is an interface to hold miscellaneous data in Blocks and Values.
type Aux interface {
	CanBeAnSSAAux()
}

// auxMark is a data-free Aux implementation; AuxMark is its canonical
// instance (presumably used as a sentinel marker — confirm at call sites).
type auxMark bool

func (auxMark) CanBeAnSSAAux() {}

// AuxMark is the canonical auxMark value.
var AuxMark auxMark

// stringAux wraps string values for use in Aux.
type stringAux string

func (stringAux) CanBeAnSSAAux() {}
742
// The auxToX / xToAux helpers convert between the Aux interface and the
// concrete types the rewrite rules store in it.

func auxToString(i Aux) string {
	return string(i.(stringAux))
}
func auxToSym(i Aux) Sym {
	// A soft assertion: returns nil (rather than panicking) when the
	// Aux does not hold a Sym.
	s, _ := i.(Sym)
	return s
}
func auxToType(i Aux) *types.Type {
	return i.(*types.Type)
}
func auxToCall(i Aux) *AuxCall {
	return i.(*AuxCall)
}
func auxToS390xCCMask(i Aux) s390x.CCMask {
	return i.(s390x.CCMask)
}
func auxToS390xRotateParams(i Aux) s390x.RotateParams {
	return i.(s390x.RotateParams)
}

// StringToAux wraps a string in an Aux.
func StringToAux(s string) Aux {
	return stringAux(s)
}
func symToAux(s Sym) Aux {
	return s
}
func callToAux(s *AuxCall) Aux {
	return s
}
func typeToAux(t *types.Type) Aux {
	return t
}
func s390xCCMaskToAux(c s390x.CCMask) Aux {
	return c
}
func s390xRotateParamsToAux(r s390x.RotateParams) Aux {
	return r
}
782
783
// uaddOvf reports whether unsigned a+b would overflow.
func uaddOvf(a, b int64) bool {
	sum := uint64(a) + uint64(b)
	// Unsigned addition overflows iff the sum wraps below either operand.
	return sum < uint64(b)
}
787
788
789
790
// loadLSymOffset simulates reading a pointer-sized word from a read-only
// symbol's data at the given offset: it returns the symbol referenced by
// an R_ADDR relocation (ignoring the R_WEAK bit, requiring addend 0) at
// exactly that offset, or nil if there is none.
func loadLSymOffset(lsym *obj.LSym, offset int64) *obj.LSym {
	if lsym.Type != objabi.SRODATA {
		return nil
	}
	for _, r := range lsym.R {
		if int64(r.Off) == offset && r.Type&^objabi.R_WEAK == objabi.R_ADDR && r.Add == 0 {
			return r.Sym
		}
	}
	return nil
}
804
// devirtLECall rewrites late-expanded call v in place into a static call
// of sym: the op becomes OpStaticLECall, the AuxCall target is replaced,
// and the now-unneeded first argument is removed.
func devirtLECall(v *Value, sym *obj.LSym) *Value {
	v.Op = OpStaticLECall
	auxcall := v.Aux.(*AuxCall)
	auxcall.Fn = sym
	// Remove the first argument, shifting the rest down.
	v.Args[0].Uses--
	copy(v.Args[0:], v.Args[1:])
	v.Args[len(v.Args)-1] = nil // aid GC
	v.Args = v.Args[:len(v.Args)-1]
	if f := v.Block.Func; f.pass.debug > 0 {
		f.Warnl(v.Pos, "de-virtualizing call")
	}
	return v
}
819
820
// isSamePtr reports whether p1 and p2 are known to point to the same
// address. A false result does not imply the pointers differ.
func isSamePtr(p1, p2 *Value) bool {
	if p1 == p2 {
		return true
	}
	if p1.Op != p2.Op {
		return false
	}
	switch p1.Op {
	case OpOffPtr:
		// Same offset off the same base.
		return p1.AuxInt == p2.AuxInt && isSamePtr(p1.Args[0], p2.Args[0])
	case OpAddr, OpLocalAddr:
		// Same symbol.
		return p1.Aux == p2.Aux
	case OpAddPtr:
		// Same index added to the same base.
		return p1.Args[1] == p2.Args[1] && isSamePtr(p1.Args[0], p2.Args[0])
	}
	return false
}

// isStackPtr reports whether v is known to point into the stack frame
// (SP-relative or a local-variable address), looking through pointer
// arithmetic.
func isStackPtr(v *Value) bool {
	for v.Op == OpOffPtr || v.Op == OpAddPtr {
		v = v.Args[0]
	}
	return v.Op == OpSP || v.Op == OpLocalAddr
}
845
846
847
848
// disjoint reports whether the memory region specified by [p1:p1+n1)
// does not overlap with [p2:p2+n2). A false result does not imply the
// regions overlap.
func disjoint(p1 *Value, n1 int64, p2 *Value, n2 int64) bool {
	if n1 == 0 || n2 == 0 {
		// Empty regions are disjoint from everything.
		return true
	}
	if p1 == p2 {
		return false
	}
	// baseAndOffset walks through OpOffPtr chains (and a trailing nil
	// check) to find the underlying base pointer and accumulated offset.
	baseAndOffset := func(ptr *Value) (base *Value, offset int64) {
		base, offset = ptr, 0
		for base.Op == OpOffPtr {
			offset += base.AuxInt
			base = base.Args[0]
		}
		if opcodeTable[base.Op].nilCheck {
			base = base.Args[0]
		}
		return base, offset
	}
	p1, off1 := baseAndOffset(p1)
	p2, off2 := baseAndOffset(p2)
	if isSamePtr(p1, p2) {
		// Same base: compare the offset intervals directly.
		return !overlap(off1, n1, off2, n2)
	}
	// Bases differ. Distinct address classes (globals/locals, SP,
	// argument slots) are known not to alias in the cases below.
	switch p1.Op {
	case OpAddr, OpLocalAddr:
		if p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpSP {
			return true
		}
		return (p2.Op == OpArg || p2.Op == OpArgIntReg) && p1.Args[0].Op == OpSP
	case OpArg, OpArgIntReg:
		if p2.Op == OpSP || p2.Op == OpLocalAddr {
			return true
		}
	case OpSP:
		return p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpArg || p2.Op == OpArgIntReg || p2.Op == OpSP
	}
	return false
}
891
892
893 func moveSize(align int64, c *Config) int64 {
894 switch {
895 case align%8 == 0 && c.PtrSize == 8:
896 return 8
897 case align%4 == 0:
898 return 4
899 case align%2 == 0:
900 return 2
901 }
902 return 1
903 }
904
905
906
907
// mergePoint finds a block among a's blocks which dominates b and is
// itself dominated by all of a's blocks. Returns nil if it can't find
// one; it may return nil even if one exists (the search is bounded).
func mergePoint(b *Block, a ...*Value) *Block {
	// Walk backwards from b until we hit one of the blocks containing
	// an element of a. d bounds the total walk length.
	d := 100

	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				goto found
			}
		}
		if len(b.Preds) > 1 {
			// Don't know which way to walk back. Abort.
			return nil
		}
		b = b.Preds[0].b
		d--
	}
	return nil
found:
	// r is the first of a's blocks we found walking backwards; if we
	// return anything, it will be r.
	r := b

	// Keep walking, counting how many of a's blocks we encounter. They
	// must all appear on this single backwards chain, so each dominates r.
	na := 0
	for d > 0 {
		for _, x := range a {
			if b == x.Block {
				na++
			}
		}
		if na == len(a) {
			// Found all of a in a backwards walk. We can return r.
			return r
		}
		if len(b.Preds) > 1 {
			return nil
		}
		b = b.Preds[0].b
		d--

	}
	return nil
}
954
955
956
957
958
959
// clobber invalidates the given values. Always returns true so it can be
// used as a rule condition. Rewrite rules use it to:
//
//	A) make sure the values are really dead and never used again, and
//	B) decrement the use counts of the values' arguments.
func clobber(vv ...*Value) bool {
	for _, v := range vv {
		v.reset(OpInvalid)
		// Note: v.Block is left intact; reset does not clear it.
	}
	return true
}

// clobberIfDead resets v when its use count is 1 (i.e. only the rule's
// match itself uses it). Always returns true so it can be used as a rule
// condition; the reset decrements the use counts of v's arguments.
func clobberIfDead(v *Value) bool {
	if v.Uses == 1 {
		v.reset(OpInvalid)
	}
	// Note: can't check v.Uses == 0 here because pending dead values may
	// not have been removed yet.
	return true
}
978
979
980
981
982
983
984
// noteRule is a development aid to track whether a rule matches: make
// the rule of interest also conditional on
//
//	noteRule("note to self: rule of interest matched")
//
// and the message prints each time the rule fires. Always returns true.
func noteRule(s string) bool {
	fmt.Println(s)
	return true
}

// countRule increments Func.ruleMatches[key], lazily allocating the map.
// Always returns true so it can be used as a rule condition; the counts
// are inspected elsewhere (see Func.ruleMatches) to find rule hot spots.
func countRule(v *Value, key string) bool {
	f := v.Block.Func
	if f.ruleMatches == nil {
		f.ruleMatches = make(map[string]int)
	}
	f.ruleMatches[key]++
	return true
}
1003
1004
1005
// warnRule emits compiler debug output with message s when cond is true
// and v is not in autogenerated code (position line <= 1 is skipped).
// Always returns true so it can be used as a rule condition.
func warnRule(cond bool, v *Value, s string) bool {
	if pos := v.Pos; pos.Line() > 1 && cond {
		v.Block.Func.Warnl(pos, s)
	}
	return true
}

// flagArg returns v's flags-typed argument, or nil if v does not have
// exactly one argument or that argument is not flags-typed.
func flagArg(v *Value) *Value {
	if len(v.Args) != 1 || !v.Args[0].Type.IsFlags() {
		return nil
	}
	return v.Args[0]
}
1020
1021
1022
1023
1024
1025
// arm64Negate finds the complement to an ARM64 condition code, for
// example !Equal -> NotEqual or !LessThan -> GreaterEqual.
//
// For floating-point conditions the signed-integer pairings would be
// wrong because NaN compares unordered, so each FP condition maps to an
// explicit "Not" variant (e.g. !LessThanF -> NotLessThanF) instead.
func arm64Negate(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterEqual
	case OpARM64LessThanU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterThan:
		return OpARM64LessEqual
	case OpARM64GreaterThanU:
		return OpARM64LessEqualU
	case OpARM64LessEqual:
		return OpARM64GreaterThan
	case OpARM64LessEqualU:
		return OpARM64GreaterThanU
	case OpARM64GreaterEqual:
		return OpARM64LessThan
	case OpARM64GreaterEqualU:
		return OpARM64LessThanU
	case OpARM64Equal:
		return OpARM64NotEqual
	case OpARM64NotEqual:
		return OpARM64Equal
	case OpARM64LessThanF:
		return OpARM64NotLessThanF
	case OpARM64NotLessThanF:
		return OpARM64LessThanF
	case OpARM64LessEqualF:
		return OpARM64NotLessEqualF
	case OpARM64NotLessEqualF:
		return OpARM64LessEqualF
	case OpARM64GreaterThanF:
		return OpARM64NotGreaterThanF
	case OpARM64NotGreaterThanF:
		return OpARM64GreaterThanF
	case OpARM64GreaterEqualF:
		return OpARM64NotGreaterEqualF
	case OpARM64NotGreaterEqualF:
		return OpARM64GreaterEqualF
	default:
		panic("unreachable")
	}
}
1068
1069
1070
1071
1072
1073
// arm64Invert evaluates (InvertFlags (CMP x y)) as (CMP y x): it returns
// the condition code that yields the same result when the comparison's
// operands are swapped (e.g. LessThan -> GreaterThan; Equal/NotEqual are
// symmetric and map to themselves).
func arm64Invert(op Op) Op {
	switch op {
	case OpARM64LessThan:
		return OpARM64GreaterThan
	case OpARM64LessThanU:
		return OpARM64GreaterThanU
	case OpARM64GreaterThan:
		return OpARM64LessThan
	case OpARM64GreaterThanU:
		return OpARM64LessThanU
	case OpARM64LessEqual:
		return OpARM64GreaterEqual
	case OpARM64LessEqualU:
		return OpARM64GreaterEqualU
	case OpARM64GreaterEqual:
		return OpARM64LessEqual
	case OpARM64GreaterEqualU:
		return OpARM64LessEqualU
	case OpARM64Equal, OpARM64NotEqual:
		return op
	case OpARM64LessThanF:
		return OpARM64GreaterThanF
	case OpARM64GreaterThanF:
		return OpARM64LessThanF
	case OpARM64LessEqualF:
		return OpARM64GreaterEqualF
	case OpARM64GreaterEqualF:
		return OpARM64LessEqualF
	case OpARM64NotLessThanF:
		return OpARM64NotGreaterThanF
	case OpARM64NotGreaterThanF:
		return OpARM64NotLessThanF
	case OpARM64NotLessEqualF:
		return OpARM64NotGreaterEqualF
	case OpARM64NotGreaterEqualF:
		return OpARM64NotLessEqualF
	default:
		panic("unreachable")
	}
}
1114
1115
1116
1117
// ccARM64Eval evaluates an ARM64 condition op against a flags value that
// is potentially constant. It returns 1 if the condition is known true,
// -1 if known false, and 0 if the flags are not constant.
func ccARM64Eval(op Op, flags *Value) int {
	fop := flags.Op
	if fop == OpARM64InvertFlags {
		// Inverted flags invert the verdict.
		return -ccARM64Eval(op, flags.Args[0])
	}
	if fop != OpARM64FlagConstant {
		return 0
	}
	fc := flagConstant(flags.AuxInt)
	// Local b2i maps a known boolean to the 1/-1 verdict encoding
	// (shadowing the package-level 0/1 b2i).
	b2i := func(b bool) int {
		if b {
			return 1
		}
		return -1
	}
	switch op {
	case OpARM64Equal:
		return b2i(fc.eq())
	case OpARM64NotEqual:
		return b2i(fc.ne())
	case OpARM64LessThan:
		return b2i(fc.lt())
	case OpARM64LessThanU:
		return b2i(fc.ult())
	case OpARM64GreaterThan:
		return b2i(fc.gt())
	case OpARM64GreaterThanU:
		return b2i(fc.ugt())
	case OpARM64LessEqual:
		return b2i(fc.le())
	case OpARM64LessEqualU:
		return b2i(fc.ule())
	case OpARM64GreaterEqual:
		return b2i(fc.ge())
	case OpARM64GreaterEqualU:
		return b2i(fc.uge())
	}
	return 0
}
1157
1158
1159
// logRule logs the use of rule s to $GOROOT/src/rulelog. It is only
// reached when the rewrite rules were generated with logging enabled.
func logRule(s string) {
	if ruleFile == nil {
		// Open the log file lazily, in append mode so repeated compiler
		// runs (e.g. during all.bash) accumulate into one log; users
		// must remove the old log to get fresh data.
		// NOTE(review): concurrent compiler processes appending to the
		// same file are not synchronized here.
		w, err := os.OpenFile(filepath.Join(os.Getenv("GOROOT"), "src", "rulelog"),
			os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
		if err != nil {
			panic(err)
		}
		ruleFile = w
	}
	_, err := fmt.Fprintln(ruleFile, s)
	if err != nil {
		panic(err)
	}
}

// ruleFile is the sink for logRule output, opened on first use.
var ruleFile io.Writer
1182
// min returns the smaller of x and y.
func min(x, y int64) int64 {
	if y < x {
		return y
	}
	return x
}

// max returns the larger of x and y.
func max(x, y int64) int64 {
	if y > x {
		return y
	}
	return x
}
1195
// isConstZero reports whether v is known to be the constant zero,
// including the nil pointer constant and zero-valued float constants.
func isConstZero(v *Value) bool {
	switch v.Op {
	case OpConstNil:
		return true
	case OpConst64, OpConst32, OpConst16, OpConst8, OpConstBool, OpConst32F, OpConst64F:
		return v.AuxInt == 0
	}
	return false
}
1205
1206
// reciprocalExact64 reports whether 1/c is representable exactly as a
// float64, i.e. c is a power of two whose reciprocal neither overflows
// nor loses precision.
func reciprocalExact64(c float64) bool {
	b := math.Float64bits(c)
	if b&(1<<52-1) != 0 {
		// Mantissa bits set: c is not a power of two.
		return false
	}
	exp := b >> 52 & (1<<11 - 1)
	// Reject exponent 0 (zero/denormal), 0x7ff (inf/NaN), and 0x7fe
	// (the reciprocal would fall outside the normal range).
	return exp != 0 && exp != 0x7fe && exp != 0x7ff
}

// reciprocalExact32 reports whether 1/c is representable exactly as a
// float32; same reasoning as reciprocalExact64 with float32 field widths.
func reciprocalExact32(c float32) bool {
	b := math.Float32bits(c)
	if b&(1<<23-1) != 0 {
		return false
	}
	exp := b >> 23 & (1<<8 - 1)
	return exp != 0 && exp != 0xfe && exp != 0xff
}
1249
1250
// isARMImmRot reports whether v can be encoded as an ARM immediate:
// an 8-bit value rotated right by an even number of bit positions.
func isARMImmRot(v uint32) bool {
	for count := 16; count > 0; count-- {
		if v&^0xff == 0 {
			return true
		}
		v = bits.RotateLeft32(v, 2)
	}
	return false
}
1261
1262
1263
// overlap reports whether the regions [offset1, offset1+size1) and
// [offset2, offset2+size2) intersect.
func overlap(offset1, size1, offset2, size2 int64) bool {
	switch {
	case offset1 >= offset2 && offset2+size2 > offset1:
		// Region 2 reaches into the start of region 1.
		return true
	case offset2 >= offset1 && offset1+size1 > offset2:
		// Region 1 reaches into the start of region 2.
		return true
	default:
		return false
	}
}

// areAdjacentOffsets reports whether two regions of the given size at
// off1 and off2 touch back-to-back (in either order) without overlapping.
func areAdjacentOffsets(off1, off2, size int64) bool {
	if off1+size == off2 {
		return true
	}
	return off2+size == off1
}
1277
1278
1279
1280
// zeroUpper32Bits reports whether the upper 32 bits of x are known to be
// zero, recursing through Phi/Select values up to depth levels.
func zeroUpper32Bits(x *Value, depth int) bool {
	switch x.Op {
	// 32-bit (and narrower) AMD64 and ARM64 ops listed here produce a
	// zero-extended result in a 64-bit register.
	case OpAMD64MOVLconst, OpAMD64MOVLload, OpAMD64MOVLQZX, OpAMD64MOVLloadidx1,
		OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVBload, OpAMD64MOVBloadidx1,
		OpAMD64MOVLloadidx4, OpAMD64ADDLload, OpAMD64SUBLload, OpAMD64ANDLload,
		OpAMD64ORLload, OpAMD64XORLload, OpAMD64CVTTSD2SL,
		OpAMD64ADDL, OpAMD64ADDLconst, OpAMD64SUBL, OpAMD64SUBLconst,
		OpAMD64ANDL, OpAMD64ANDLconst, OpAMD64ORL, OpAMD64ORLconst,
		OpAMD64XORL, OpAMD64XORLconst, OpAMD64NEGL, OpAMD64NOTL,
		OpAMD64SHRL, OpAMD64SHRLconst, OpAMD64SARL, OpAMD64SARLconst,
		OpAMD64SHLL, OpAMD64SHLLconst:
		return true
	case OpARM64REV16W, OpARM64REVW, OpARM64RBITW, OpARM64CLZW, OpARM64EXTRWconst,
		OpARM64MULW, OpARM64MNEGW, OpARM64UDIVW, OpARM64DIVW, OpARM64UMODW,
		OpARM64MADDW, OpARM64MSUBW, OpARM64RORW, OpARM64RORWconst:
		return true
	case OpArg:
		// A 4-byte argument is known zero-extended when its type is
		// unsigned, or on amd64 (whose ABI zero-extends 32-bit args —
		// NOTE(review): per the arch convention encoded here).
		return x.Type.Size() == 4 && (x.Type.IsUnsigned() || x.Block.Func.Config.arch == "amd64")
	case OpPhi, OpSelect0, OpSelect1:
		// Phis can infinitely loop, so bound the recursion by depth.
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !zeroUpper32Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true

	}
	return false
}

// zeroUpper48Bits reports whether the upper 48 bits of x are known to be
// zero, recursing through Phi/Select values up to depth levels.
func zeroUpper48Bits(x *Value, depth int) bool {
	switch x.Op {
	case OpAMD64MOVWQZX, OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVWloadidx2:
		return true
	case OpArg:
		return x.Type.Size() == 2 && (x.Type.IsUnsigned() || x.Block.Func.Config.arch == "amd64")
	case OpPhi, OpSelect0, OpSelect1:
		// Phis can infinitely loop, so bound the recursion by depth.
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !zeroUpper48Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true

	}
	return false
}

// zeroUpper56Bits reports whether the upper 56 bits of x are known to be
// zero, recursing through Phi/Select values up to depth levels.
func zeroUpper56Bits(x *Value, depth int) bool {
	switch x.Op {
	case OpAMD64MOVBQZX, OpAMD64MOVBload, OpAMD64MOVBloadidx1:
		return true
	case OpArg:
		return x.Type.Size() == 1 && (x.Type.IsUnsigned() || x.Block.Func.Config.arch == "amd64")
	case OpPhi, OpSelect0, OpSelect1:
		// Phis can infinitely loop, so bound the recursion by depth.
		if depth <= 0 {
			return false
		}
		for i := range x.Args {
			if !zeroUpper56Bits(x.Args[i], depth-1) {
				return false
			}
		}
		return true

	}
	return false
}
1365
// isInlinableMemclr reports whether a memory clear of sz bytes may be
// inlined on the target architecture. A negative sz (unknown size) is
// never inlinable.
func isInlinableMemclr(c *Config, sz int64) bool {
	if sz < 0 {
		return false
	}
	// amd64 and arm64 inline any non-negative size; ppc64 variants only
	// below a threshold. Other architectures never inline here.
	switch c.arch {
	case "amd64", "arm64":
		return true
	case "ppc64le", "ppc64":
		return sz < 512
	}
	return false
}
1380
1381
1382
1383
1384
1385
// isInlinableMemmove reports whether a memmove of sz bytes from src to
// dst may be replaced by an inlined Move op on the target architecture.
// Larger sizes are permitted only when the regions are provably disjoint
// (so load/store ordering cannot change the result).
func isInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
	// Per-arch size thresholds; the disjoint checks extend the limit on
	// architectures whose Move lowering requires non-overlapping regions.
	switch c.arch {
	case "amd64":
		return sz <= 16 || (sz < 1024 && disjoint(dst, sz, src, sz))
	case "386", "arm64":
		return sz <= 8
	case "s390x", "ppc64", "ppc64le":
		return sz <= 8 || disjoint(dst, sz, src, sz)
	case "arm", "loong64", "mips", "mips64", "mipsle", "mips64le":
		return sz <= 4
	}
	return false
}

// IsInlinableMemmove is the exported wrapper around isInlinableMemmove.
func IsInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
	return isInlinableMemmove(dst, src, sz, c)
}
1406
1407
1408
1409
// logLargeCopy logs (via logopt) the lowering of a copy of s >= 128
// bytes. Always returns true so it can be used as a rule condition.
func logLargeCopy(v *Value, s int64) bool {
	if s < 128 {
		return true
	}
	if logopt.Enabled() {
		logopt.LogOpt(v.Pos, "copy", "lower", v.Block.Func.Name, fmt.Sprintf("%d bytes", s))
	}
	return true
}

// LogLargeCopy is the exported variant of logLargeCopy for callers that
// have a function name and position rather than a *Value.
func LogLargeCopy(funcName string, pos src.XPos, s int64) {
	if s < 128 {
		return
	}
	if logopt.Enabled() {
		logopt.LogOpt(pos, "copy", "lower", funcName, fmt.Sprintf("%d bytes", s))
	}
}
1427
1428
1429
// hasSmallRotate reports whether the architecture has rotate
// instructions for sub-register-width values (as relied on by the
// rotation rewrite rules).
func hasSmallRotate(c *Config) bool {
	switch c.arch {
	case "amd64", "386":
		return true
	default:
		return false
	}
}

// supportsPPC64PCRel reports whether PC-relative addressing may be used:
// it requires POWER10 (GOPPC64 >= 10) and is currently limited to linux.
func supportsPPC64PCRel() bool {
	return buildcfg.GOPPC64 >= 10 && buildcfg.GOOS == "linux"
}
1444
// newPPC64ShiftAuxInt packs a PPC64 shift/mask triple (sh, mb, me) into
// an int32 aux value as sh<<16 | mb<<8 | me, panicking when any field is
// out of range for an sz-bit operation.
func newPPC64ShiftAuxInt(sh, mb, me, sz int64) int32 {
	check := func(name string, v int64) {
		if v < 0 || v >= sz {
			panic("PPC64 shift arg " + name + " out of range")
		}
	}
	check("sh", sh)
	check("mb", mb)
	check("me", me)
	return int32(sh<<16 | mb<<8 | me)
}

// GetPPC64Shiftsh extracts the sh field packed by newPPC64ShiftAuxInt.
func GetPPC64Shiftsh(auxint int64) int64 { return int64(int8(auxint >> 16)) }

// GetPPC64Shiftmb extracts the mb field packed by newPPC64ShiftAuxInt.
func GetPPC64Shiftmb(auxint int64) int64 { return int64(int8(auxint >> 8)) }

// GetPPC64Shiftme extracts the me field packed by newPPC64ShiftAuxInt.
func GetPPC64Shiftme(auxint int64) int64 { return int64(int8(auxint)) }
1469
1470
1471
1472
1473
// isPPC64WordRotateMask reports whether the low 32 bits of v64 form a
// nonzero mask of contiguous ones, possibly wrapping around (e.g. bits
// 31 and 0 set), as accepted by PPC64 rotate-and-mask instructions.
func isPPC64WordRotateMask(v64 int64) bool {
	v := uint32(v64)
	if v == 0 {
		return false
	}
	// contiguous reports whether x's set bits form one unbroken run:
	// adding the lowest set bit to x clears the run iff it is unbroken.
	contiguous := func(x uint32) bool {
		y := (x & -x) + x
		return y&x == 0
	}
	// Either the mask itself is one run, or its complement is (the
	// wrapping case).
	return contiguous(v) || contiguous(^v)
}
1483
1484
1485
1486
// encodePPC64RotateMask packs a rotate amount and a contiguous (possibly
// wrapping) mask into an int64 aux value laid out as
// nbits<<24 | rotate<<16 | mb<<8 | me, where mb/me are the mask's
// begin/end bit positions (see DecodePPC64RotateMask for the inverse).
func encodePPC64RotateMask(rotate, mask, nbits int64) int64 {
	var mb, me, mbn, men int

	// Determine the boundaries of the mask and of its complement.
	if mask == 0 || ^mask == 0 || rotate >= nbits {
		panic(fmt.Sprintf("invalid PPC64 rotate mask: %x %d %d", uint64(mask), rotate, nbits))
	} else if nbits == 32 {
		mb = bits.LeadingZeros32(uint32(mask))
		me = 32 - bits.TrailingZeros32(uint32(mask))
		mbn = bits.LeadingZeros32(^uint32(mask))
		men = 32 - bits.TrailingZeros32(^uint32(mask))
	} else {
		mb = bits.LeadingZeros64(uint64(mask))
		me = 64 - bits.TrailingZeros64(uint64(mask))
		mbn = bits.LeadingZeros64(^uint64(mask))
		men = 64 - bits.TrailingZeros64(^uint64(mask))
	}
	// Check for a wrapping mask (e.g. bits at 0 and nbits-1 both set):
	// then the complement's boundaries describe the run instead.
	if mb == 0 && me == int(nbits) {
		// swap the inverted values
		mb, me = men, mbn
	}

	return int64(me) | int64(mb<<8) | int64(rotate<<16) | int64(nbits<<24)
}
1512
1513
1514
1515
1516
1517
// mergePPC64RLDICLandSRDconst merges an encoded RLDICL (rotate field
// zero) with a preceding SRDconst by s: the shift becomes a rotate by
// 64-s, and the mask-begin field is raised to at least s so the bits
// shifted in are cleared. Panics if the encoding already has a rotate.
func mergePPC64RLDICLandSRDconst(encoded, s int64) int64 {
	mb := s
	r := 64 - s
	// Raise the mb field if the existing one is smaller than s.
	if mb > (encoded>>8)&0xFF {
		encoded = encoded&^0xFF00 | mb<<8
	}
	if encoded&0xFF0000 != 0 {
		panic("non-zero rotate")
	}
	return encoded | r<<16
}
1531
1532
1533
// DecodePPC64RotateMask is the inverse of encodePPC64RotateMask: it
// unpacks the rotate amount, the mask boundaries mb/me (me adjusted to
// the inclusive POWER ISA convention), and the materialized mask.
func DecodePPC64RotateMask(sauxint int64) (rotate, mb, me int64, mask uint64) {
	auxint := uint64(sauxint)
	rotate = int64((auxint >> 16) & 0xFF)
	mb = int64((auxint >> 8) & 0xFF)
	me = int64((auxint >> 0) & 0xFF)
	nbits := int64((auxint >> 24) & 0xFF)
	// Rebuild the run of ones between bit positions nbits-me and nbits-mb.
	mask = ((1 << uint(nbits-mb)) - 1) ^ ((1 << uint(nbits-me)) - 1)
	if mb > me {
		// Wrapping mask: the encoded boundaries describe the complement.
		mask = ^mask
	}
	if nbits == 32 {
		mask = uint64(uint32(mask))
	}

	// Fixup the mask-end value to match the POWER ISA's inclusive
	// MASK(mb,me) convention (end bit is part of the mask).
	me = (me - 1) & (nbits - 1)
	return
}
1553
1554
1555
1556
// isPPC64ValidShiftMask reports whether v is a nonzero run of ones
// starting at bit 0 (i.e. of the form 2^n-1, including all-ones).
func isPPC64ValidShiftMask(v int64) bool {
	// v+1 clears a low run of ones; no bits survive iff the run was all of v.
	return v != 0 && (v+1)&v == 0
}

// getPPC64ShiftMaskLength returns the length in bits of such a mask.
func getPPC64ShiftMaskLength(v int64) int64 {
	return int64(bits.Len64(uint64(v)))
}
1567
1568
1569
// mergePPC64RShiftMask narrows mask m to the bits which can survive an
// nbits-wide logical right shift by s.
func mergePPC64RShiftMask(m, s, nbits int64) int64 {
	survivors := (uint64(1)<<uint(nbits) - 1) >> uint(s)
	return m & int64(survivors)
}
1574
1575
// mergePPC64AndSrwi combines (ANDconst [m] (SRWconst x [s])) into the
// encoding of a single RLWINM-style rotate-mask, or returns 0 if the
// resulting mask is not a valid word rotate mask.
func mergePPC64AndSrwi(m, s int64) int64 {
	mask := mergePPC64RShiftMask(m, s, 32)
	if !isPPC64WordRotateMask(mask) {
		return 0
	}
	return encodePPC64RotateMask((32-s)&31, mask, 32)
}
1583
1584
1585
// mergePPC64ClrlsldiSrw combines a CLRLSLDI (clear left then shift left)
// applied to the result of a 32-bit SRWconst into one rotate-and-mask
// encoding. It returns 0 when the combination cannot be represented as a
// 32-bit rotate mask.
func mergePPC64ClrlsldiSrw(sld, srw int64) int64 {
	// mask_1: bits surviving the right shift by srw.
	mask_1 := uint64(0xFFFFFFFF >> uint(srw))
	// mask_2: bits kept by the CLRLSLDI clear step.
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(int64(sld)))

	// Combine both masks, then shift by the CLRLSLDI shift amount.
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(sld))

	// Net rotation: the SRW acts as a rotate by 32-srw, plus the left shift.
	r_1 := 32 - srw
	r_2 := GetPPC64Shiftsh(sld)
	r_3 := (r_1 + r_2) & 31

	// Reject masks that overflow 32 bits or clear everything.
	if uint64(uint32(mask_3)) != mask_3 || mask_3 == 0 {
		return 0
	}
	return encodePPC64RotateMask(int64(r_3), int64(mask_3), 32)
}
1603
1604
1605
// mergePPC64ClrlsldiRlwinm combines a CLRLSLDI (clear left then shift left)
// applied to the result of an RLWINM (encoded in rlw) into one
// rotate-and-mask encoding. It returns 0 when the combination cannot be
// represented as a 32-bit rotate mask.
func mergePPC64ClrlsldiRlwinm(sld int32, rlw int64) int64 {
	// mask_1: the mask applied by the RLWINM.
	r_1, _, _, mask_1 := DecodePPC64RotateMask(rlw)
	// mask_2: bits kept by the CLRLSLDI clear step.
	mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(int64(sld)))

	// Combine both masks, then apply the CLRLSLDI left shift.
	mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(int64(sld)))
	r_2 := GetPPC64Shiftsh(int64(sld))
	// Net rotation: RLWINM rotate plus the left shift, mod 32.
	r_3 := (r_1 + r_2) & 31

	// The merged mask must itself be a valid 32-bit word rotate mask.
	if !isPPC64WordRotateMask(int64(mask_3)) || uint64(uint32(mask_3)) != mask_3 {
		return 0
	}
	return encodePPC64RotateMask(r_3, int64(mask_3), 32)
}
1622
1623
1624
// mergePPC64SldiSrw combines an SLDconst by sld applied to the result of a
// 32-bit SRWconst by srw into one rotate-and-mask encoding. It returns 0
// when sld exceeds srw or srw is not a word shift.
func mergePPC64SldiSrw(sld, srw int64) int64 {
	if sld > srw || srw >= 32 {
		return 0
	}
	// Bits surviving the right shift...
	mask_r := uint32(0xFFFFFFFF) >> uint(srw)
	// ...intersected with those surviving the later left shift.
	mask_l := uint32(0xFFFFFFFF) >> uint(sld)
	mask := (mask_r & mask_l) << uint(sld)
	// Net rotation is 32-srw (from the SRW) plus sld, mod 32.
	return encodePPC64RotateMask((32-srw+sld)&31, int64(mask), 32)
}
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660 func convertPPC64OpToOpCC(op *Value) *Value {
1661 ccOpMap := map[Op]Op{
1662 OpPPC64ADD: OpPPC64ADDCC,
1663 OpPPC64ADDconst: OpPPC64ADDCCconst,
1664 OpPPC64AND: OpPPC64ANDCC,
1665 OpPPC64ANDN: OpPPC64ANDNCC,
1666 OpPPC64CNTLZD: OpPPC64CNTLZDCC,
1667 OpPPC64OR: OpPPC64ORCC,
1668 OpPPC64SUB: OpPPC64SUBCC,
1669 OpPPC64NEG: OpPPC64NEGCC,
1670 OpPPC64NOR: OpPPC64NORCC,
1671 OpPPC64XOR: OpPPC64XORCC,
1672 }
1673 b := op.Block
1674 opCC := b.NewValue0I(op.Pos, ccOpMap[op.Op], types.NewTuple(op.Type, types.TypeFlags), op.AuxInt)
1675 opCC.AddArgs(op.Args...)
1676 op.reset(OpSelect0)
1677 op.AddArgs(opCC)
1678 return op
1679 }
1680
1681
// rotateLeft32 rotates the low 32 bits of v left by rotate bits and
// returns the result zero-extended to int64.
func rotateLeft32(v, rotate int64) int64 {
	r := bits.RotateLeft32(uint32(v), int(rotate))
	return int64(r)
}
1685
// rotateRight64 rotates v right by rotate bits.
func rotateRight64(v, rotate int64) int64 {
	// A right rotate is a left rotate by the negated amount.
	return int64(bits.RotateLeft64(uint64(v), -int(rotate)))
}
1689
1690
1691 func armBFAuxInt(lsb, width int64) arm64BitField {
1692 if lsb < 0 || lsb > 63 {
1693 panic("ARM(64) bit field lsb constant out of range")
1694 }
1695 if width < 1 || lsb+width > 64 {
1696 panic("ARM(64) bit field width constant out of range")
1697 }
1698 return arm64BitField(width | lsb<<8)
1699 }
1700
1701
1702 func (bfc arm64BitField) getARM64BFlsb() int64 {
1703 return int64(uint64(bfc) >> 8)
1704 }
1705
1706
1707 func (bfc arm64BitField) getARM64BFwidth() int64 {
1708 return int64(bfc) & 0xff
1709 }
1710
1711
// isARM64BFMask reports whether mask>>rshift is a non-empty run of ones
// starting at bit 0 (shiftedMask+1 is a power of two) that still fits
// below bit 64 when placed at lsb.
func isARM64BFMask(lsb, mask, rshift int64) bool {
	shiftedMask := int64(uint64(mask) >> uint64(rshift))
	return shiftedMask != 0 && isPowerOfTwo64(shiftedMask+1) && nto(shiftedMask)+lsb < 64
}
1716
1717
// arm64BFWidth returns the width (via nto) of the mask mask>>rshift.
// It panics if the shifted mask is zero.
func arm64BFWidth(mask, rshift int64) int64 {
	shiftedMask := int64(uint64(mask) >> uint64(rshift))
	if shiftedMask == 0 {
		panic("ARM64 BF mask is zero")
	}
	return nto(shiftedMask)
}
1725
1726
1727
// sizeof returns the size in bytes of the type t.
// t is declared interface{} but must hold a *types.Type; any other
// dynamic type panics on the assertion.
func sizeof(t interface{}) int64 {
	return t.(*types.Type).Size()
}
1731
1732
1733
1734
1735 func registerizable(b *Block, typ *types.Type) bool {
1736 if typ.IsPtrShaped() || typ.IsFloat() || typ.IsBoolean() {
1737 return true
1738 }
1739 if typ.IsInteger() {
1740 return typ.Size() <= b.Func.Config.RegSize
1741 }
1742 return false
1743 }
1744
1745
// needRaceCleanup reports whether the racefuncenter/racefuncexit call v
// (with callee sym) can be removed: the function must be race-instrumented
// and contain no other calls that need the race context. When removing a
// racefuncenter, it also unlinks the Store feeding the call as a side
// effect.
func needRaceCleanup(sym *AuxCall, v *Value) bool {
	f := v.Block.Func
	if !f.Config.Race {
		return false
	}
	if !isSameCall(sym, "runtime.racefuncenter") && !isSameCall(sym, "runtime.racefuncexit") {
		return false
	}
	// Scan the whole function: any "real" call means the race enter/exit
	// pair must stay.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpStaticCall, OpStaticLECall:
				// The race calls themselves and a few runtime panic
				// helpers are tolerated; anything else keeps the pair.
				s := v.Aux.(*AuxCall).Fn.String()
				switch s {
				case "runtime.racefuncenter", "runtime.racefuncexit",
					"runtime.panicdivide", "runtime.panicwrap",
					"runtime.panicshift":
					continue
				}
				// Some other static call: the race context is needed.
				return false
			case OpPanicBounds, OpPanicExtend:
				// Panic generators, tolerated like the panic calls above.
			case OpClosureCall, OpInterCall, OpClosureLECall, OpInterLECall:
				// Indirect calls always require the race context.
				return false
			}
		}
	}
	if isSameCall(sym, "runtime.racefuncenter") {
		// racefuncenter takes an argument delivered through memory; if we
		// remove the call, unlink the Store that feeds it.
		if v.Args[0].Op != OpStore {
			if v.Op == OpStaticLECall {
				// No Store feeding the LECall form; nothing to unlink.
				return true
			}
			return false
		}
		// Splice the Store out of the memory chain: replace it with a
		// Copy of its incoming memory.
		mem := v.Args[0].Args[2]
		v.Args[0].reset(OpCopy)
		v.Args[0].AddArg(mem)
	}
	return true
}
1794
1795
1796 func symIsRO(sym interface{}) bool {
1797 lsym := sym.(*obj.LSym)
1798 return lsym.Type == objabi.SRODATA && len(lsym.R) == 0
1799 }
1800
1801
1802 func symIsROZero(sym Sym) bool {
1803 lsym := sym.(*obj.LSym)
1804 if lsym.Type != objabi.SRODATA || len(lsym.R) != 0 {
1805 return false
1806 }
1807 for _, b := range lsym.P {
1808 if b != 0 {
1809 return false
1810 }
1811 }
1812 return true
1813 }
1814
1815
1816
// isFixed32 reports whether the 4 bytes at sym+off are known at compile
// time; see isFixed.
func isFixed32(c *Config, sym Sym, off int64) bool {
	return isFixed(c, sym, off, 4)
}
1820
1821
1822
// isFixed reports whether the size bytes at sym+off are known at compile
// time. Currently that is only the case for the 4-byte word at offset
// 2*PtrSize of a type descriptor symbol (its hash — see fixed32).
func isFixed(c *Config, sym Sym, off, size int64) bool {
	lsym := sym.(*obj.LSym)
	if lsym.Extra == nil {
		return false
	}
	if _, ok := (*lsym.Extra).(*obj.TypeInfo); ok {
		// Type descriptor: the 4-byte field at offset 2*PtrSize is
		// computable at compile time.
		if off == 2*c.PtrSize && size == 4 {
			return true
		}
	}
	return false
}
// fixed32 returns the compile-time-known 32-bit value at sym+off.
// Callers must first establish isFixed32(c, sym, off); otherwise this
// Fatals (and lsym.Extra is dereferenced without a nil check).
func fixed32(c *Config, sym Sym, off int64) int32 {
	lsym := sym.(*obj.LSym)
	if ti, ok := (*lsym.Extra).(*obj.TypeInfo); ok {
		if off == 2*c.PtrSize {
			// The type hash of the described type.
			return int32(types.TypeHash(ti.Type.(*types.Type)))
		}
	}
	base.Fatalf("fixed32 data not known for %s:%d", sym, off)
	return 0
}
1845
1846
1847
// isFixedSym reports whether the pointer-sized word at sym+off is a
// compile-time-known symbol reference: an address relocation with zero
// addend inside a read-only symbol.
func isFixedSym(sym Sym, off int64) bool {
	lsym := sym.(*obj.LSym)
	switch {
	case lsym.Type == objabi.SRODATA:
		// Read-only data: contents are fixed at compile time.
	default:
		return false
	}
	for _, r := range lsym.R {
		if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off && r.Add == 0 {
			return true
		}
	}
	return false
}
// fixedSym returns the symbol referenced by the address relocation at
// sym+off. Callers must first establish isFixedSym(sym, off); otherwise
// this Fatals. As a side effect it marks type: and go:itab symbols as
// used in an interface on behalf of f.
func fixedSym(f *Func, sym Sym, off int64) Sym {
	lsym := sym.(*obj.LSym)
	for _, r := range lsym.R {
		if (r.Type == objabi.R_ADDR || r.Type == objabi.R_WEAKADDR) && int64(r.Off) == off {
			if strings.HasPrefix(r.Sym.Name, "type:") {
				// Resolving the reference at compile time bypasses the
				// relocation that would otherwise record the type use, so
				// record it here directly.
				// NOTE(review): rationale reconstructed from the call —
				// confirm against reflectdata docs.
				reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
			} else if strings.HasPrefix(r.Sym.Name, "go:itab") {
				// Same recording for itab symbols.
				reflectdata.MarkTypeSymUsedInInterface(r.Sym, f.fe.Func().Linksym())
			}
			return r.Sym
		}
	}
	base.Fatalf("fixedSym data not known for %s:%d", sym, off)
	return nil
}
1885
1886
1887 func read8(sym interface{}, off int64) uint8 {
1888 lsym := sym.(*obj.LSym)
1889 if off >= int64(len(lsym.P)) || off < 0 {
1890
1891
1892
1893
1894 return 0
1895 }
1896 return lsym.P[off]
1897 }
1898
1899
1900 func read16(sym interface{}, off int64, byteorder binary.ByteOrder) uint16 {
1901 lsym := sym.(*obj.LSym)
1902
1903
1904 var src []byte
1905 if 0 <= off && off < int64(len(lsym.P)) {
1906 src = lsym.P[off:]
1907 }
1908 buf := make([]byte, 2)
1909 copy(buf, src)
1910 return byteorder.Uint16(buf)
1911 }
1912
1913
1914 func read32(sym interface{}, off int64, byteorder binary.ByteOrder) uint32 {
1915 lsym := sym.(*obj.LSym)
1916 var src []byte
1917 if 0 <= off && off < int64(len(lsym.P)) {
1918 src = lsym.P[off:]
1919 }
1920 buf := make([]byte, 4)
1921 copy(buf, src)
1922 return byteorder.Uint32(buf)
1923 }
1924
1925
1926 func read64(sym interface{}, off int64, byteorder binary.ByteOrder) uint64 {
1927 lsym := sym.(*obj.LSym)
1928 var src []byte
1929 if 0 <= off && off < int64(len(lsym.P)) {
1930 src = lsym.P[off:]
1931 }
1932 buf := make([]byte, 8)
1933 copy(buf, src)
1934 return byteorder.Uint64(buf)
1935 }
1936
1937
1938 func sequentialAddresses(x, y *Value, n int64) bool {
1939 if x == y && n == 0 {
1940 return true
1941 }
1942 if x.Op == Op386ADDL && y.Op == Op386LEAL1 && y.AuxInt == n && y.Aux == nil &&
1943 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
1944 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
1945 return true
1946 }
1947 if x.Op == Op386LEAL1 && y.Op == Op386LEAL1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
1948 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
1949 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
1950 return true
1951 }
1952 if x.Op == OpAMD64ADDQ && y.Op == OpAMD64LEAQ1 && y.AuxInt == n && y.Aux == nil &&
1953 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
1954 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
1955 return true
1956 }
1957 if x.Op == OpAMD64LEAQ1 && y.Op == OpAMD64LEAQ1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
1958 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
1959 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
1960 return true
1961 }
1962 return false
1963 }
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976
// flagConstant holds the four condition flags (N, Z, C, V) resulting from
// a constant-folded flag-setting operation, packed one per bit.
type flagConstant uint8

// bit reports whether bit i of fc is set.
func (fc flagConstant) bit(i uint) bool {
	return fc&(1<<i) != 0
}

// N reports whether the result is negative.
func (fc flagConstant) N() bool { return fc.bit(0) }

// Z reports whether the result is zero.
func (fc flagConstant) Z() bool { return fc.bit(1) }

// C reports the carry flag: set by unsigned-add overflow, and by an
// unsigned subtract that does NOT borrow (see addFlags64/subFlags64).
func (fc flagConstant) C() bool { return fc.bit(2) }

// V reports whether the operation overflowed in signed arithmetic.
func (fc flagConstant) V() bool { return fc.bit(3) }

// Signed comparison results derived from the flags.
func (fc flagConstant) eq() bool { return fc.Z() }
func (fc flagConstant) ne() bool { return !fc.Z() }
func (fc flagConstant) lt() bool { return fc.N() != fc.V() }
func (fc flagConstant) le() bool { return fc.Z() || fc.lt() }
func (fc flagConstant) gt() bool { return !fc.Z() && fc.ge() }
func (fc flagConstant) ge() bool { return fc.N() == fc.V() }

// Unsigned comparison results derived from the flags.
func (fc flagConstant) ult() bool { return !fc.C() }
func (fc flagConstant) ule() bool { return fc.Z() || fc.ult() }
func (fc flagConstant) ugt() bool { return !fc.Z() && fc.uge() }
func (fc flagConstant) uge() bool { return fc.C() }

// Noov variants: the signed comparisons restricted to the no-overflow case.
func (fc flagConstant) ltNoov() bool { return fc.lt() && !fc.V() }
func (fc flagConstant) leNoov() bool { return fc.le() && !fc.V() }
func (fc flagConstant) gtNoov() bool { return fc.gt() && !fc.V() }
func (fc flagConstant) geNoov() bool { return fc.ge() && !fc.V() }

// String renders all four flags for debugging.
func (fc flagConstant) String() string {
	return fmt.Sprintf("N=%v,Z=%v,C=%v,V=%v", fc.N(), fc.Z(), fc.C(), fc.V())
}
2047
2048 type flagConstantBuilder struct {
2049 N bool
2050 Z bool
2051 C bool
2052 V bool
2053 }
2054
2055 func (fcs flagConstantBuilder) encode() flagConstant {
2056 var fc flagConstant
2057 if fcs.N {
2058 fc |= 1
2059 }
2060 if fcs.Z {
2061 fc |= 2
2062 }
2063 if fcs.C {
2064 fc |= 4
2065 }
2066 if fcs.V {
2067 fc |= 8
2068 }
2069 return fc
2070 }
2071
2072
2073
2074
2075
2076
2077 func addFlags64(x, y int64) flagConstant {
2078 var fcb flagConstantBuilder
2079 fcb.Z = x+y == 0
2080 fcb.N = x+y < 0
2081 fcb.C = uint64(x+y) < uint64(x)
2082 fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0
2083 return fcb.encode()
2084 }
2085
2086
2087 func subFlags64(x, y int64) flagConstant {
2088 var fcb flagConstantBuilder
2089 fcb.Z = x-y == 0
2090 fcb.N = x-y < 0
2091 fcb.C = uint64(y) <= uint64(x)
2092 fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0
2093 return fcb.encode()
2094 }
2095
2096
2097 func addFlags32(x, y int32) flagConstant {
2098 var fcb flagConstantBuilder
2099 fcb.Z = x+y == 0
2100 fcb.N = x+y < 0
2101 fcb.C = uint32(x+y) < uint32(x)
2102 fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0
2103 return fcb.encode()
2104 }
2105
2106
2107 func subFlags32(x, y int32) flagConstant {
2108 var fcb flagConstantBuilder
2109 fcb.Z = x-y == 0
2110 fcb.N = x-y < 0
2111 fcb.C = uint32(y) <= uint32(x)
2112 fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0
2113 return fcb.encode()
2114 }
2115
2116
2117
2118 func logicFlags64(x int64) flagConstant {
2119 var fcb flagConstantBuilder
2120 fcb.Z = x == 0
2121 fcb.N = x < 0
2122 return fcb.encode()
2123 }
2124
2125
2126
2127 func logicFlags32(x int32) flagConstant {
2128 var fcb flagConstantBuilder
2129 fcb.Z = x == 0
2130 fcb.N = x < 0
2131 return fcb.encode()
2132 }
2133
// makeJumpTableSym returns a fresh symbol to hold block b's jump table,
// named after the enclosing function and the block ID.
func makeJumpTableSym(b *Block) *obj.LSym {
	s := base.Ctxt.Lookup(fmt.Sprintf("%s.jump%d", b.Func.fe.Func().LSym.Name, b.ID))
	// Mark the symbol static: it is private to this function.
	s.Set(obj.AttrStatic, true)
	return s
}
2140
2141
2142
2143 func canRotate(c *Config, bits int64) bool {
2144 if bits > c.PtrSize*8 {
2145
2146 return false
2147 }
2148 switch c.arch {
2149 case "386", "amd64", "arm64":
2150 return true
2151 case "arm", "s390x", "ppc64", "ppc64le", "wasm", "loong64":
2152 return bits >= 32
2153 default:
2154 return false
2155 }
2156 }
2157
2158
// isARM64bitcon reports whether x can be encoded as the immediate of an
// ARM64 logical instruction: a single run of ones (or its complement),
// rotated, replicated across the register.
func isARM64bitcon(x uint64) bool {
	if x == 1<<64-1 || x == 0 {
		// All-ones and all-zero are not encodable.
		return false
	}
	// Determine the repetition period of the pattern by comparing x with
	// itself rotated by half-widths, then sign-extend one period across
	// the register so the final run-of-ones test sees a single pattern.
	switch {
	case x != x>>32|x<<32:
		// period is 64: nothing to fold.
	case x != x>>16|x<<48:
		// period is 32: fold to one 32-bit pattern.
		x = uint64(int64(int32(x)))
	case x != x>>8|x<<56:
		// period is 16: fold to one 16-bit pattern.
		x = uint64(int64(int16(x)))
	case x != x>>4|x<<60:
		// period is 8: fold to one 8-bit pattern.
		x = uint64(int64(int8(x)))
	default:
		// Period is 4 or 2. Every non-trivial 4-bit (or 2-bit) pattern is
		// a rotated run of ones, so these are always encodable
		// (x == 0 and x == all-ones were rejected above).
		return true
	}
	// Encodable iff the folded value is one contiguous run of ones, or the
	// complement of one (a run of zeros).
	return sequenceOfOnes(x) || sequenceOfOnes(^x)
}
2187
2188
// sequenceOfOnes reports whether x is a single contiguous run of ones
// (with arbitrary leading and trailing zeros). x == 0 is reported as true.
func sequenceOfOnes(x uint64) bool {
	low := x & -x // lowest set bit of x (0 when x == 0)
	// Adding the lowest set bit to a single run of ones carries all the
	// way through the run, leaving at most one bit set.
	sum := x + low
	return sum&(sum-1) == 0
}
2194
2195
// isARM64addcon reports whether v can be encoded as the immediate of an
// ARM64 ADD/SUB instruction: a non-negative 12-bit value, optionally
// shifted left by 12 bits.
func isARM64addcon(v int64) bool {
	if v < 0 {
		return false
	}
	if v&0xFFF == 0 {
		// Exact multiples of 4096 use the shifted-by-12 encoding.
		v >>= 12
	}
	return v <= 0xFFF
}
2206
2207
2208
2209
// setPos sets v's source position to pos and always returns true.
// NOTE(review): the bool return appears to exist so the call can be used
// inside a rewrite rule's boolean condition — confirm against the rule
// generator.
func setPos(v *Value, pos src.XPos) bool {
	v.Pos = pos
	return true
}
2214