Source file src/runtime/mbitmap_allocheaders.go (package runtime)
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.allocheaders

// Garbage collector: type and heap bitmaps.
//
// The heap bitmap records, for each pointer-sized word of heap memory,
// whether that word holds a pointer (1) or a scalar (0). Where that
// information lives depends on the object's size:
//
//   - Small objects (no larger than minSizeForMallocHeader) store a
//     packed bitmap for the whole span at the end of the span itself;
//     see heapBits, heapBitsSmallForAddr, and writeHeapBitsSmall.
//   - Larger objects in small size classes carry a malloc header: a
//     single *_type stored in the first mallocHeaderSize bytes of the
//     allocation, whose GCData supplies the pointer mask on demand.
//   - Large (single-object) spans record their type in span.largeType.
//
// Objects without pointers (noscan spans) have neither a bitmap nor a
// header. The typePointers iterator below provides a uniform view over
// all three representations.

package runtime

import (
	"internal/abi"
	"internal/goarch"
	"runtime/internal/sys"
	"unsafe"
)

const (
	// mallocHeaderSize is the number of bytes reserved at the start of
	// an allocation for the malloc header (a single *_type). It is 8
	// even on 32-bit platforms so that the data following the header
	// stays 8-byte aligned.
	mallocHeaderSize = 8

	// minSizeForMallocHeader is the largest object size whose pointer
	// bitmap is stored inline at the end of its span; objects strictly
	// larger than this carry a malloc header instead. One bitmap word
	// (ptrBits bits, each covering one pointer-sized word) is exactly
	// enough to describe an object of this size.
	minSizeForMallocHeader = goarch.PtrSize * ptrBits
)

// heapBitsInSpan reports whether an object of the given size stores its
// pointer/scalar bitmap at the end of its span, rather than behind a
// malloc header or in span.largeType.
func heapBitsInSpan(userSize uintptr) bool {
	return userSize <= minSizeForMallocHeader
}
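
// Illustrative example (values assumed here, not part of the original file):
// on a 64-bit platform goarch.PtrSize is 8 and ptrBits is 64, so
// minSizeForMallocHeader is 8*64 = 512 bytes. A 496-byte object keeps its
// bitmap at the end of its span, while a 640-byte object instead begins with
// an 8-byte malloc header holding its *_type:
//
//	heapBitsInSpan(496) // true: bitmap stored in the span
//	heapBitsInSpan(640) // false: object carries a malloc header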

// heapArenaPtrScalar is retained for the heapArena layout; with allocation
// headers it carries no bitmap data.
type heapArenaPtrScalar struct {
	// N.B. This is no longer necessary with allocation headers.
}

// typePointers is an iterator over the pointers in a heap object.
//
// Must be constructed manually; see typePointersOf, typePointersOfUnchecked,
// and typePointersOfType.
type typePointers struct {
	// elem is the address of the current array element of type typ being
	// iterated over. Objects that are not arrays are treated as
	// single-element arrays, in which case this value does not change.
	elem uintptr

	// addr is the address the iterator is currently working from and
	// describes the first pointer-sized word covered by mask.
	addr uintptr

	// mask is a bitmask where each bit corresponds to a pointer-sized
	// word after addr: bit 0 is the word at addr, bit 1 the next word,
	// and so on. A 1 bit means that word holds a pointer. next and
	// nextFast clear bits in mask as their pointers are produced.
	mask uintptr

	// typ is the type information for the object. It is nil for objects
	// in spans where heapBitsInSpan(elemsize) is true, since the in-span
	// bitmap covers the whole object at once.
	typ *_type
}

// typePointersOf returns an iterator over all heap pointers in the range
// [addr, addr+size).
//
// addr and addr+size must lie within the span. Note that addr+size must be
// passed as the limit argument to the iterator's next method on each
// iteration; this slightly awkward API allows typePointers to be
// destructured by callers.
func (span *mspan) typePointersOf(addr, size uintptr) typePointers {
	base := span.objBase(addr)
	tp := span.typePointersOfUnchecked(base)
	if base == addr && size == span.elemsize {
		return tp
	}
	return tp.fastForward(addr-tp.addr, addr+size)
}

// typePointersOfUnchecked is like typePointersOf, but assumes addr is the
// base address of an allocation slot in the span (it requires that
// addr == span.objBase(addr)) and does not support iterating over a
// sub-range of an object.
func (span *mspan) typePointersOfUnchecked(addr uintptr) typePointers {
	const doubleCheck = false
	if doubleCheck && span.objBase(addr) != addr {
		print("runtime: addr=", addr, " base=", span.objBase(addr), "\n")
		throw("typePointersOfUnchecked called with non-base address for object")
	}

	spc := span.spanclass
	if spc.noscan() {
		return typePointers{}
	}
	if heapBitsInSpan(span.elemsize) {
		// Handle header-less objects: the pointer bitmap for the whole
		// object lives at the end of the span.
		return typePointers{elem: addr, addr: addr, mask: span.heapBitsSmallForAddr(addr)}
	}

	// All of these objects have a header. Small objects store the type in
	// the malloc header; large objects record it on the span.
	var typ *_type
	if spc.sizeclass() != 0 {
		// Pull the allocation header from the first word of the object.
		typ = *(**_type)(unsafe.Pointer(addr))
		addr += mallocHeaderSize
	} else {
		typ = span.largeType
	}
	gcdata := typ.GCData
	return typePointers{elem: addr, addr: addr, mask: readUintptr(gcdata), typ: typ}
}

// typePointersOfType is like typePointersOf, but assumes addr points to one
// or more values of type typ. It skips the header lookup performed by
// typePointersOfUnchecked, so it is faster when the type is already known.
// typ must not be nil and must not use a GC program (its pointer mask must
// be directly readable from typ.GCData).
func (span *mspan) typePointersOfType(typ *abi.Type, addr uintptr) typePointers {
	const doubleCheck = false
	if doubleCheck && (typ == nil || typ.Kind_&kindGCProg != 0) {
		throw("bad type passed to typePointersOfType")
	}
	if span.spanclass.noscan() {
		return typePointers{}
	}
	// Since we have the type, pretend we have a header.
	gcdata := typ.GCData
	return typePointers{elem: addr, addr: addr, mask: readUintptr(gcdata), typ: typ}
}

// nextFast is the fast path of next: it produces the next pointer from the
// current mask without refilling it. It is written to be inlineable and, as
// the name implies, fast. A returned address of 0 means the mask had no bits
// set; callers must then fall back to next to refill the mask.
func (tp typePointers) nextFast() (typePointers, uintptr) {
	// TESTQ/JEQ
	if tp.mask == 0 {
		return tp, 0
	}
	// BSFQ
	var i int
	if goarch.PtrSize == 8 {
		i = sys.TrailingZeros64(uint64(tp.mask))
	} else {
		i = sys.TrailingZeros32(uint32(tp.mask))
	}
	// BTCQ
	tp.mask ^= uintptr(1) << (i & (ptrBits - 1))
	// LEAQ (XX)(XX*8)
	return tp, tp.addr + uintptr(i)*goarch.PtrSize
}
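
// Illustrative walk-through (assumed values, not part of the original file):
// with tp.addr = 0xc000010000 and tp.mask = 0b1010, TrailingZeros finds bit 1,
// the XOR clears it, and nextFast returns the word one pointer past addr:
//
//	next, addr := tp.nextFast()
//	// addr == 0xc000010008 on 64-bit (tp.addr + 1*goarch.PtrSize)
//	// next.mask == 0b1000, ready for the following call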

// next advances the pointers iterator, returning the updated iterator and
// the address of the next pointer. An address of 0 means iteration is
// complete. limit must be the same value on every call for a given iterator.
func (tp typePointers) next(limit uintptr) (typePointers, uintptr) {
	for {
		if tp.mask != 0 {
			return tp.nextFast()
		}

		// Stop if we don't actually have type information.
		if tp.typ == nil {
			return typePointers{}, 0
		}

		// Advance to the next element if necessary.
		if tp.addr+goarch.PtrSize*ptrBits >= tp.elem+tp.typ.PtrBytes {
			tp.elem += tp.typ.Size_
			tp.addr = tp.elem
		} else {
			tp.addr += ptrBits * goarch.PtrSize
		}

		// Check if we've exceeded the limit with the last update.
		if tp.addr >= limit {
			return typePointers{}, 0
		}

		// Grab more bits and try again.
		tp.mask = readUintptr(addb(tp.typ.GCData, (tp.addr-tp.elem)/goarch.PtrSize/8))
		if tp.addr+goarch.PtrSize*ptrBits > limit {
			bits := (tp.addr + goarch.PtrSize*ptrBits - limit) / goarch.PtrSize
			tp.mask &^= ((1 << (bits)) - 1) << (ptrBits - bits)
		}
	}
}
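
// A minimal usage sketch (illustrative, mirroring the loop used by
// bulkBarrierPreWrite below): enumerate every pointer slot in a range by
// repeatedly calling next with a fixed limit until it returns address 0:
//
//	tp := span.typePointersOf(addr, size)
//	for {
//		var p uintptr
//		if tp, p = tp.next(addr + size); p == 0 {
//			break
//		}
//		// p is the address of a pointer-sized word holding a pointer.
//	}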

// fastForward moves the iterator forward by n bytes. n must be a multiple of
// goarch.PtrSize. limit must be the same limit passed to next for this
// iterator.
func (tp typePointers) fastForward(n, limit uintptr) typePointers {
	// Basic bounds check.
	target := tp.addr + n
	if target >= limit {
		return typePointers{}
	}
	if tp.typ == nil {
		// Handle small objects: clear any bits before the target
		// address, then any bits past the limit.
		tp.mask &^= (1 << ((target - tp.addr) / goarch.PtrSize)) - 1

		if tp.addr+goarch.PtrSize*ptrBits > limit {
			bits := (tp.addr + goarch.PtrSize*ptrBits - limit) / goarch.PtrSize
			tp.mask &^= ((1 << (bits)) - 1) << (ptrBits - bits)
		}
		return tp
	}

	// Move up elem and addr. Offsets within an element are always at a
	// ptrBits*goarch.PtrSize boundary.
	if n >= tp.typ.Size_ {
		// elem needs to be moved to the element containing
		// tp.addr + n.
		oldelem := tp.elem
		tp.elem += (tp.addr - tp.elem + n) / tp.typ.Size_ * tp.typ.Size_
		tp.addr = tp.elem + alignDown(n-(tp.elem-oldelem), ptrBits*goarch.PtrSize)
	} else {
		tp.addr += alignDown(n, ptrBits*goarch.PtrSize)
	}

	if tp.addr-tp.elem >= tp.typ.PtrBytes {
		// We're starting in the non-pointer area of an array.
		// Move up to the next element.
		tp.elem += tp.typ.Size_
		tp.addr = tp.elem
		tp.mask = readUintptr(tp.typ.GCData)

		// We may have exceeded the limit after this. Bail just like next does.
		if tp.addr >= limit {
			return typePointers{}
		}
	} else {
		// Grab the mask, but then clear any bits before the target
		// address.
		tp.mask = readUintptr(addb(tp.typ.GCData, (tp.addr-tp.elem)/goarch.PtrSize/8))
		tp.mask &^= (1 << ((target - tp.addr) / goarch.PtrSize)) - 1
	}
	if tp.addr+goarch.PtrSize*ptrBits > limit {
		bits := (tp.addr + goarch.PtrSize*ptrBits - limit) / goarch.PtrSize
		tp.mask &^= ((1 << (bits)) - 1) << (ptrBits - bits)
	}
	return tp
}

// objBase returns the base pointer for the object containing addr in span.
//
// Assumes that addr points into a valid part of span (span.base() <= addr < span.limit).
func (span *mspan) objBase(addr uintptr) uintptr {
	return span.base() + span.objIndex(addr)*span.elemsize
}

// bulkBarrierPreWrite executes a write barrier for every pointer slot in the
// memory range [src, src+size), using pointer/scalar information from
// [dst, dst+size). This executes the write barriers necessary before a
// memmove; it does not perform the actual writes.
//
// src, dst, and size must be pointer-aligned, and the range [dst, dst+size)
// must lie within a single object.
//
// As a special case, src == 0 indicates that this is being used for a
// memclr; bulkBarrierPreWrite will then pass 0 for the src of each write
// barrier.
//
// Callers should call bulkBarrierPreWrite immediately before calling
// memmove(dst, src, size). Pointer data is not maintained for allocations
// containing no pointers at all, so callers must first make sure the
// underlying allocation contains pointers, usually by checking typ.PtrBytes.
//
// The typ argument is optional. If non-nil, it must describe the memory at
// dst exactly and must not use a GC program; it is then used purely as an
// optimization to skip the per-object metadata lookup.
func bulkBarrierPreWrite(dst, src, size uintptr, typ *abi.Type) {
	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		throw("bulkBarrierPreWrite: unaligned arguments")
	}
	if !writeBarrier.enabled {
		return
	}
	s := spanOf(dst)
	if s == nil {
		// If dst is a global, use the data or BSS bitmaps to execute
		// write barriers.
		for _, datap := range activeModules() {
			if datap.data <= dst && dst < datap.edata {
				bulkBarrierBitmap(dst, src, size, dst-datap.data, datap.gcdatamask.bytedata)
				return
			}
		}
		for _, datap := range activeModules() {
			if datap.bss <= dst && dst < datap.ebss {
				bulkBarrierBitmap(dst, src, size, dst-datap.bss, datap.gcbssmask.bytedata)
				return
			}
		}
		return
	} else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
		// dst was heap memory at some point, but isn't now.
		// It can't be a global. It must be either our stack,
		// or in the case of direct channel sends, it could be
		// another stack. Either way, no new pointers are being
		// exposed, so no barriers are needed.
		return
	}
	buf := &getg().m.p.ptr().wbBuf

	// Double-check that the bitmaps produced by the two possible
	// iterator constructions below agree.
	const doubleCheck = false
	if doubleCheck {
		doubleCheckTypePointersOfType(s, typ, dst, size)
	}

	var tp typePointers
	if typ != nil && typ.Kind_&kindGCProg == 0 {
		tp = s.typePointersOfType(typ, dst)
	} else {
		tp = s.typePointersOf(dst, size)
	}
	if src == 0 {
		for {
			var addr uintptr
			if tp, addr = tp.next(dst + size); addr == 0 {
				break
			}
			dstx := (*uintptr)(unsafe.Pointer(addr))
			p := buf.get1()
			p[0] = *dstx
		}
	} else {
		for {
			var addr uintptr
			if tp, addr = tp.next(dst + size); addr == 0 {
				break
			}
			dstx := (*uintptr)(unsafe.Pointer(addr))
			srcx := (*uintptr)(unsafe.Pointer(src + (addr - dst)))
			p := buf.get2()
			p[0] = *dstx
			p[1] = *srcx
		}
	}
}
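
// A hedged sketch of how this is used (typedmemmove and memmove are the
// runtime's names, but the body below is a simplification, not the actual
// implementation): a typed copy queues barriers for every pointer slot
// first, then performs the raw copy:
//
//	if writeBarrier.enabled && typ.PtrBytes != 0 {
//		bulkBarrierPreWrite(uintptr(dst), uintptr(src), typ.PtrBytes, typ)
//	}
//	memmove(dst, src, typ.Size_)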

// bulkBarrierPreWriteSrcOnly is like bulkBarrierPreWrite but does not
// execute write barriers for [dst, dst+size).
//
// In addition to the requirements of bulkBarrierPreWrite, callers need to
// ensure [dst, dst+size) is zeroed. This is used for special cases where
// e.g. dst was just created and zeroed with malloc.
//
// The typ argument is optional, with the same meaning as in
// bulkBarrierPreWrite.
func bulkBarrierPreWriteSrcOnly(dst, src, size uintptr, typ *abi.Type) {
	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		throw("bulkBarrierPreWriteSrcOnly: unaligned arguments")
	}
	if !writeBarrier.enabled {
		return
	}
	buf := &getg().m.p.ptr().wbBuf
	s := spanOf(dst)

	// Double-check that the bitmaps produced by the two possible
	// iterator constructions below agree.
	const doubleCheck = false
	if doubleCheck {
		doubleCheckTypePointersOfType(s, typ, dst, size)
	}

	var tp typePointers
	if typ != nil && typ.Kind_&kindGCProg == 0 {
		tp = s.typePointersOfType(typ, dst)
	} else {
		tp = s.typePointersOf(dst, size)
	}
	for {
		var addr uintptr
		if tp, addr = tp.next(dst + size); addr == 0 {
			break
		}
		srcx := (*uintptr)(unsafe.Pointer(addr - dst + src))
		p := buf.get1()
		p[0] = *srcx
	}
}

// initHeapBits zeroes the in-span heap bitmap for spans that keep one
// (small scannable objects and user arena chunks), so stale bits from a
// previous use of the memory are never observed. The forceClear parameter
// is unused in this build.
func (s *mspan) initHeapBits(forceClear bool) {
	if (!s.spanclass.noscan() && heapBitsInSpan(s.elemsize)) || s.isUserArenaChunk {
		b := s.heapBits()
		for i := range b {
			b[i] = 0
		}
	}
}

// bswapIfBigEndian swaps the byte order of the uintptr on goarch.BigEndian
// platforms, and leaves it alone elsewhere.
func bswapIfBigEndian(x uintptr) uintptr {
	if goarch.BigEndian {
		if goarch.PtrSize == 8 {
			return uintptr(sys.Bswap64(uint64(x)))
		}
		return uintptr(sys.Bswap32(uint32(x)))
	}
	return x
}

type writeUserArenaHeapBits struct {
	offset uintptr // offset into the span of the word whose bit 0 is described by the low bit of mask.
	mask   uintptr // pending pointer/scalar bits, not yet flushed to the bitmap.
	valid  uintptr // number of bits in mask that are valid (including low).
	low    uintptr // number of low-order bits in the current bitmap word to leave untouched.
}

// writeUserArenaHeapBits returns a writer for setting the pointer bits of a
// user arena allocation starting at addr. Bits are accumulated a word at a
// time via write and pad, then committed with flush.
func (s *mspan) writeUserArenaHeapBits(addr uintptr) (h writeUserArenaHeapBits) {
	offset := addr - s.base()

	// We may start writing bits in the middle of a heap bitmap word.
	// Remember how many bits into the word we start, so we don't
	// overwrite the bits that are already there.
	h.low = offset / goarch.PtrSize % ptrBits

	// Round the offset down to a heap bitmap word boundary.
	h.offset = offset - h.low*goarch.PtrSize

	// We don't have any bits yet.
	h.mask = 0
	h.valid = h.low

	return
}

// write appends the pointerness of the next valid pointer slots using the
// low valid bits of bits. 1 = pointer, 0 = scalar.
func (h writeUserArenaHeapBits) write(s *mspan, bits, valid uintptr) writeUserArenaHeapBits {
	if h.valid+valid <= ptrBits {
		// Fast path - just accumulate the bits.
		h.mask |= bits << h.valid
		h.valid += valid
		return h
	}
	// Too many bits to fit in this word. Fill the current word with the
	// low-order part of bits and carry the remainder into a fresh mask.
	data := h.mask | bits<<h.valid
	h.mask = bits >> (ptrBits - h.valid)
	h.valid += valid - ptrBits

	// Flush the filled word to the bitmap, preserving the low bits we
	// were told not to overwrite.
	idx := h.offset / (ptrBits * goarch.PtrSize)
	m := uintptr(1)<<h.low - 1
	bitmap := s.heapBits()
	bitmap[idx] = bswapIfBigEndian(bswapIfBigEndian(bitmap[idx])&m | data)

	// Note: no synchronization is required for this write because the
	// allocator has exclusive access to the page; visibility is
	// guaranteed by the publication barrier in mallocgc.

	// Move to the next bitmap word.
	h.offset += ptrBits * goarch.PtrSize
	h.low = 0
	return h
}
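
// Worked example (assumed values, not from the original file): on 64-bit,
// suppose h.valid is 60 and write is called with valid = 8. The low 4 bits
// of bits complete the current 64-bit bitmap word, which is flushed; the
// high 4 bits carry over:
//
//	data := h.mask | bits<<60 // completed word, written to the bitmap
//	h.mask = bits >> 4        // 4 leftover bits start the next word
//	h.valid = 60 + 8 - 64     // == 4 bits now pending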

// pad appends size bytes worth of zero ("scalar") bits.
func (h writeUserArenaHeapBits) pad(s *mspan, size uintptr) writeUserArenaHeapBits {
	if size == 0 {
		return h
	}
	words := size / goarch.PtrSize
	for words > ptrBits {
		h = h.write(s, 0, ptrBits)
		words -= ptrBits
	}
	return h.write(s, 0, words)
}

// flush commits any pending bits to the heap bitmap and zero-fills the
// bitmap out to cover all of [addr, addr+size).
func (h writeUserArenaHeapBits) flush(s *mspan, addr, size uintptr) {
	offset := addr - s.base()

	// zeros is the number of bits needed to represent the object minus
	// the number of bits we've already written. This is the number of 0
	// bits that still need to be added.
	zeros := (offset+size-h.offset)/goarch.PtrSize - h.valid

	// Add zero bits up to the bitmap word boundary.
	if zeros > 0 {
		z := ptrBits - h.valid
		if z > zeros {
			z = zeros
		}
		h.valid += z
		zeros -= z
	}

	// Find the word in the bitmap that we're going to write.
	bitmap := s.heapBits()
	idx := h.offset / (ptrBits * goarch.PtrSize)

	// Write any remaining bits, preserving bits below low and above valid.
	if h.valid != h.low {
		m := uintptr(1)<<h.low - 1      // don't clear existing bits below "low"
		m |= ^(uintptr(1)<<h.valid - 1) // don't clear existing bits above "valid"
		bitmap[idx] = bswapIfBigEndian(bswapIfBigEndian(bitmap[idx])&m | h.mask)
	}
	if zeros == 0 {
		return
	}

	// Advance to the next bitmap word.
	h.offset += ptrBits * goarch.PtrSize

	// Continue writing zeros for the rest of the object. For standard
	// use of the pointer bits this is not required, as the bits are read
	// from the beginning of the object, but some uses (such as bulk
	// write barriers) may start reading mid-object, so these writes are
	// still required.
	for {
		// Write zero bits.
		idx := h.offset / (ptrBits * goarch.PtrSize)
		if zeros < ptrBits {
			bitmap[idx] = bswapIfBigEndian(bswapIfBigEndian(bitmap[idx]) &^ (uintptr(1)<<zeros - 1))
			break
		} else if zeros == ptrBits {
			bitmap[idx] = 0
			break
		} else {
			bitmap[idx] = 0
			zeros -= ptrBits
		}
		h.offset += ptrBits * goarch.PtrSize
	}
}
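
// A minimal sketch of the writer's intended call sequence (mirroring
// userArenaHeapBitsSetType below; the 3-bit mask value is assumed for
// illustration): write the pointer bits, pad out the scalar tail, then
// flush:
//
//	h := s.writeUserArenaHeapBits(uintptr(ptr))
//	h = h.write(s, 0b101, 3)             // pointer, scalar, pointer
//	h = h.pad(s, typ.Size_-typ.PtrBytes) // scalar tail of the value
//	h.flush(s, uintptr(ptr), typ.Size_)  // commit to the span's bitmap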

// heapBits returns the heap ptr/scalar bits stored at the end of the span
// for small object spans and heap arena spans.
//
// Note that the uintptr of each element means something different for small
// object spans and for heap arena spans. Small object spans are never
// interpreted as anything but uintptr, so they're immune to differences in
// endianness. The bits for user arena spans, however, are written and read
// with bswapIfBigEndian, since that bitmap is exposed through a dummy type
// descriptor and must match the byte order the compiler would emit.
func (span *mspan) heapBits() []uintptr {
	const doubleCheck = false

	if doubleCheck && !span.isUserArenaChunk {
		if span.spanclass.noscan() {
			throw("heapBits called for noscan")
		}
		if span.elemsize > minSizeForMallocHeader {
			throw("heapBits called for span class that should have a malloc header")
		}
	}
	// Find the bitmap at the end of the span.
	//
	// Nearly every span with heap bits is exactly one page in size.
	// Arenas are the only exception.
	if span.npages == 1 {
		// This will be inlined and constant-folded down.
		return heapBitsSlice(span.base(), pageSize)
	}
	return heapBitsSlice(span.base(), span.npages*pageSize)
}

// heapBitsSlice makes a []uintptr aliasing the ptr/scalar bitmap stored in
// the trailing bytes of the span [spanBase, spanBase+spanSize).
func heapBitsSlice(spanBase, spanSize uintptr) []uintptr {
	bitmapSize := spanSize / goarch.PtrSize / 8
	elems := int(bitmapSize / goarch.PtrSize)
	var sl notInHeapSlice
	sl = notInHeapSlice{(*notInHeap)(unsafe.Pointer(spanBase + spanSize - bitmapSize)), elems, elems}
	return *(*[]uintptr)(unsafe.Pointer(&sl))
}
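
// Worked arithmetic (illustrative, 64-bit values assumed): for a one-page
// span with pageSize = 8192 and goarch.PtrSize = 8, one bit per
// pointer-sized word gives
//
//	bitmapSize = 8192/8/8 = 128 bytes
//	elems      = 128/8    = 16 uintptrs
//
// so the bitmap occupies the last 128 bytes of the page, at spanBase+8064.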

// heapBitsSmallForAddr loads the heap bits for the object stored at addr
// from span.heapBits.
//
// addr must be the base address of an object in the span, and the span must
// store its heap bits inline (heapBitsInSpan(span.elemsize) must be true).
func (span *mspan) heapBitsSmallForAddr(addr uintptr) uintptr {
	spanSize := span.npages * pageSize
	bitmapSize := spanSize / goarch.PtrSize / 8
	hbits := (*byte)(unsafe.Pointer(span.base() + spanSize - bitmapSize))

	// These objects are always small enough that their bitmaps fit in a
	// single uintptr, so loading the relevant word or two is sufficient.
	//
	// Mirrors mspan.writeHeapBitsSmall.
	//
	// We would prefer to use heapBits() here, but it introduces bounds
	// checks and throws that push this nosplit function over the limit.
	i := (addr - span.base()) / goarch.PtrSize / ptrBits
	j := (addr - span.base()) / goarch.PtrSize % ptrBits
	bits := span.elemsize / goarch.PtrSize
	word0 := (*uintptr)(unsafe.Pointer(addb(hbits, goarch.PtrSize*(i+0))))
	word1 := (*uintptr)(unsafe.Pointer(addb(hbits, goarch.PtrSize*(i+1))))

	var read uintptr
	if j+bits > ptrBits {
		// Two reads: the object's bits straddle a bitmap word boundary.
		bits0 := ptrBits - j
		bits1 := bits - bits0
		read = *word0 >> j
		read |= (*word1 & ((1 << bits1) - 1)) << bits0
	} else {
		// One read: the object's bits fit within a single bitmap word.
		read = (*word0 >> j) & ((1 << bits) - 1)
	}
	return read
}

// writeHeapBitsSmall writes the heap bits for small objects whose ptr/scalar
// data is stored as a bitmap at the end of the span.
//
// Assumes dataSize <= ptrBits*goarch.PtrSize, x points into the span,
// heapBitsInSpan(dataSize) is true, and dataSize >= typ.Size_.
func (span *mspan) writeHeapBitsSmall(x, dataSize uintptr, typ *_type) (scanSize uintptr) {
	// The objects here are always really small, so a single load is sufficient.
	src0 := readUintptr(typ.GCData)

	// Create repetitions of the bitmap if we have a small array.
	bits := span.elemsize / goarch.PtrSize
	scanSize = typ.PtrBytes
	src := src0
	switch typ.Size_ {
	case goarch.PtrSize:
		// The type is a single pointer-sized word holding a pointer,
		// so every word of the data is a pointer.
		src = (1 << (dataSize / goarch.PtrSize)) - 1
	default:
		for i := typ.Size_; i < dataSize; i += typ.Size_ {
			src |= src0 << (i / goarch.PtrSize)
			scanSize += typ.Size_
		}
	}

	// Since we're never writing more than one uintptr's worth of bits,
	// we're either going to do one or two writes.
	dst := span.heapBits()
	o := (x - span.base()) / goarch.PtrSize
	i := o / ptrBits
	j := o % ptrBits
	if j+bits > ptrBits {
		// Two writes: the bits straddle a bitmap word boundary.
		bits0 := ptrBits - j
		bits1 := bits - bits0
		dst[i+0] = dst[i+0]&(^uintptr(0)>>bits0) | (src << j)
		dst[i+1] = dst[i+1]&^((1<<bits1)-1) | (src >> bits0)
	} else {
		// One write: the bits fit within a single bitmap word.
		dst[i] = (dst[i] &^ (((1 << bits) - 1) << j)) | (src << j)
	}

	const doubleCheck = false
	if doubleCheck {
		srcRead := span.heapBitsSmallForAddr(x)
		if srcRead != src {
			print("runtime: x=", hex(x), " i=", i, " j=", j, " bits=", bits, "\n")
			print("runtime: dataSize=", dataSize, " typ.Size_=", typ.Size_, " typ.PtrBytes=", typ.PtrBytes, "\n")
			print("runtime: src0=", hex(src0), " src=", hex(src), " srcRead=", hex(srcRead), "\n")
			throw("bad pointer bits written for small object")
		}
	}
	return
}
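
// Worked example (assumed values, not from the original file): consider a
// 16-byte type on 64-bit with one pointer in its first word (src0 = 0b01),
// allocated in a 48-byte slot with dataSize = 48. The loop replicates the
// two-bit element mask three times:
//
//	src = 0b01 | 0b01<<2 | 0b01<<4 // == 0b010101, one bit per word
//
// so words 0, 2, and 4 of the object are marked as pointers before the mask
// is shifted into place at bit offset j of the span's bitmap.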

// heapBitsSetType is a no-op compatibility stub in this build; heapSetType
// below takes its place when allocation headers are enabled.
func heapBitsSetType(x, size, dataSize uintptr, typ *_type) {
}

// heapSetType records that the new allocation [x, x+size) holds in
// [x, x+dataSize) one or more values of type typ. (The number of values is
// given by dataSize / typ.Size_.) If dataSize < size, the fragment
// [x+dataSize, x+size) is recorded as non-pointer data. It is known that the
// type has pointers somewhere; malloc does not call heapSetType when there
// are no pointers.
//
// header, if non-nil, points at the location where the object's malloc
// header (or span.largeType, for large objects) should be stored; nil means
// the span stores its heap bits inline. The returned scanSize is the number
// of bytes of the object the GC must scan.
//
// There can be read-write races between heapSetType and things that read
// the heap metadata like scanobject. However, since heapSetType is only
// used for objects that have not yet been made reachable, readers will
// ignore bits being modified by this function. On weakly-ordered machines,
// callers must execute a store/store (publication) barrier between calling
// this function and making the object reachable.
func heapSetType(x, dataSize uintptr, typ *_type, header **_type, span *mspan) (scanSize uintptr) {
	const doubleCheck = false

	gctyp := typ
	if header == nil {
		if doubleCheck && (!heapBitsInSpan(dataSize) || !heapBitsInSpan(span.elemsize)) {
			throw("tried to write heap bits, but no heap bits in span")
		}
		// Handle the case where we have no malloc header.
		scanSize = span.writeHeapBitsSmall(x, dataSize, typ)
	} else {
		if typ.Kind_&kindGCProg != 0 {
			// Allocate space to unroll the GC program into. The space
			// holds a dummy _type value followed by the unrolled
			// bitmap; the dummy _type refers to the bitmap, and the GC
			// consumes it as if it were a real type.
			if span.spanclass.sizeclass() != 0 {
				throw("GCProg for type that isn't large")
			}
			spaceNeeded := alignUp(unsafe.Sizeof(_type{}), goarch.PtrSize)
			heapBitsOff := spaceNeeded
			spaceNeeded += alignUp(typ.PtrBytes/goarch.PtrSize/8, goarch.PtrSize)
			npages := alignUp(spaceNeeded, pageSize) / pageSize
			var progSpan *mspan
			systemstack(func() {
				progSpan = mheap_.allocManual(npages, spanAllocPtrScalarBits)
				memclrNoHeapPointers(unsafe.Pointer(progSpan.base()), progSpan.npages*pageSize)
			})
			// Write the dummy _type in the new space.
			//
			// We only need to write Size_, PtrBytes, and GCData, since
			// that's all the GC cares about.
			gctyp = (*_type)(unsafe.Pointer(progSpan.base()))
			gctyp.Size_ = typ.Size_
			gctyp.PtrBytes = typ.PtrBytes
			gctyp.GCData = (*byte)(add(unsafe.Pointer(progSpan.base()), heapBitsOff))
			gctyp.TFlag = abi.TFlagUnrolledBitmap

			// Expand the GC program into the space reserved after the
			// dummy _type.
			runGCProg(addb(typ.GCData, 4), gctyp.GCData)
		}

		// Write out the header.
		*header = gctyp
		scanSize = span.elemsize
	}

	if doubleCheck {
		doubleCheckHeapPointers(x, dataSize, gctyp, header, span)

		// To exercise the less common code paths, pick a random interior
		// pointer and size and check that iterating from there is also
		// correct.
		maxIterBytes := span.elemsize
		if header == nil {
			maxIterBytes = dataSize
		}
		off := alignUp(uintptr(cheaprand())%dataSize, goarch.PtrSize)
		size := dataSize - off
		if size == 0 {
			off -= goarch.PtrSize
			size += goarch.PtrSize
		}
		interior := x + off
		size -= alignDown(uintptr(cheaprand())%size, goarch.PtrSize)
		if size == 0 {
			size = goarch.PtrSize
		}
		// Round the size up to a multiple of the element size.
		size = (size + gctyp.Size_ - 1) / gctyp.Size_ * gctyp.Size_
		if interior+size > x+maxIterBytes {
			size = x + maxIterBytes - interior
		}
		doubleCheckHeapPointersInterior(x, interior, size, dataSize, gctyp, header, span)
	}
	return
}
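
// Summarizing the three metadata paths dispatched above (64-bit sizes
// assumed; an illustration, not part of the original file):
//
//	header == nil (size <= 512 bytes)       -> writeHeapBitsSmall writes bits to the span's tail bitmap
//	header != nil, small size class (> 512) -> *header = gctyp fills the malloc header inside the object
//	header != nil, large span               -> header points at span.largeType, so the span records the type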

// doubleCheckHeapPointers verifies that the object's type iterator produces
// exactly the pointers implied by replicating typ's pointer mask across the
// allocation.
func doubleCheckHeapPointers(x, dataSize uintptr, typ *_type, header **_type, span *mspan) {
	// Check that scanning the full object works.
	tp := span.typePointersOfUnchecked(span.objBase(x))
	maxIterBytes := span.elemsize
	if header == nil {
		maxIterBytes = dataSize
	}
	bad := false
	for i := uintptr(0); i < maxIterBytes; i += goarch.PtrSize {
		// Compute the pointer bit we want at offset i.
		want := false
		if i < span.elemsize {
			off := i % typ.Size_
			if off < typ.PtrBytes {
				j := off / goarch.PtrSize
				want = *addb(typ.GCData, j/8)>>(j%8)&1 != 0
			}
		}
		if want {
			var addr uintptr
			tp, addr = tp.next(x + span.elemsize)
			if addr == 0 {
				println("runtime: found bad iterator")
			}
			if addr != x+i {
				print("runtime: addr=", hex(addr), " x+i=", hex(x+i), "\n")
				bad = true
			}
		}
	}
	if !bad {
		var addr uintptr
		tp, addr = tp.next(x + span.elemsize)
		if addr == 0 {
			return
		}
		println("runtime: extra pointer:", hex(addr))
	}
	print("runtime: hasHeader=", header != nil, " typ.Size_=", typ.Size_, " hasGCProg=", typ.Kind_&kindGCProg != 0, "\n")
	print("runtime: x=", hex(x), " dataSize=", dataSize, " elemsize=", span.elemsize, "\n")
	print("runtime: typ=", unsafe.Pointer(typ), " typ.PtrBytes=", typ.PtrBytes, "\n")
	print("runtime: limit=", hex(x+span.elemsize), "\n")
	tp = span.typePointersOfUnchecked(x)
	dumpTypePointers(tp)
	for {
		var addr uintptr
		if tp, addr = tp.next(x + span.elemsize); addr == 0 {
			println("runtime: would've stopped here")
			dumpTypePointers(tp)
			break
		}
		print("runtime: addr=", hex(addr), "\n")
		dumpTypePointers(tp)
	}
	throw("heapSetType: pointer entry not correct")
}

// doubleCheckHeapPointersInterior is like doubleCheckHeapPointers, but
// starts iterating from an interior pointer into the object.
func doubleCheckHeapPointersInterior(x, interior, size, dataSize uintptr, typ *_type, header **_type, span *mspan) {
	bad := false
	if interior < x {
		print("runtime: interior=", hex(interior), " x=", hex(x), "\n")
		throw("found bad interior pointer")
	}
	off := interior - x
	tp := span.typePointersOf(interior, size)
	for i := off; i < off+size; i += goarch.PtrSize {
		// Compute the pointer bit we want at offset i.
		want := false
		if i < span.elemsize {
			off := i % typ.Size_
			if off < typ.PtrBytes {
				j := off / goarch.PtrSize
				want = *addb(typ.GCData, j/8)>>(j%8)&1 != 0
			}
		}
		if want {
			var addr uintptr
			tp, addr = tp.next(interior + size)
			if addr == 0 {
				println("runtime: found bad iterator")
				bad = true
			}
			if addr != x+i {
				print("runtime: addr=", hex(addr), " x+i=", hex(x+i), "\n")
				bad = true
			}
		}
	}
	if !bad {
		var addr uintptr
		tp, addr = tp.next(interior + size)
		if addr == 0 {
			return
		}
		println("runtime: extra pointer:", hex(addr))
	}
	print("runtime: hasHeader=", header != nil, " typ.Size_=", typ.Size_, "\n")
	print("runtime: x=", hex(x), " dataSize=", dataSize, " elemsize=", span.elemsize, " interior=", hex(interior), " size=", size, "\n")
	print("runtime: limit=", hex(interior+size), "\n")
	tp = span.typePointersOf(interior, size)
	dumpTypePointers(tp)
	for {
		var addr uintptr
		if tp, addr = tp.next(interior + size); addr == 0 {
			println("runtime: would've stopped here")
			dumpTypePointers(tp)
			break
		}
		print("runtime: addr=", hex(addr), "\n")
		dumpTypePointers(tp)
	}

	print("runtime: want: ")
	for i := off; i < off+size; i += goarch.PtrSize {
		// Compute the pointer bit we want at offset i.
		want := false
		if i < dataSize {
			off := i % typ.Size_
			if off < typ.PtrBytes {
				j := off / goarch.PtrSize
				want = *addb(typ.GCData, j/8)>>(j%8)&1 != 0
			}
		}
		if want {
			print("1")
		} else {
			print("0")
		}
	}
	println()

	throw("heapSetType: pointer entry not correct")
}

// doubleCheckTypePointersOfType checks that the type-based iterator
// (typePointersOfType) and the metadata-based iterator (typePointersOf)
// agree for the range [addr, addr+size).
func doubleCheckTypePointersOfType(s *mspan, typ *_type, addr, size uintptr) {
	if typ == nil || typ.Kind_&kindGCProg != 0 {
		return
	}
	if typ.Kind_&kindMask == kindInterface {
		// Interfaces are unfortunately inconsistently handled when it
		// comes to the type pointer, so it's easy to produce a lot of
		// false positives here.
		return
	}
	tp0 := s.typePointersOfType(typ, addr)
	tp1 := s.typePointersOf(addr, size)
	failed := false
	for {
		var addr0, addr1 uintptr
		tp0, addr0 = tp0.next(addr + size)
		tp1, addr1 = tp1.next(addr + size)
		if addr0 != addr1 {
			failed = true
			break
		}
		if addr0 == 0 {
			break
		}
	}
	if failed {
		tp0 := s.typePointersOfType(typ, addr)
		tp1 := s.typePointersOf(addr, size)
		print("runtime: addr=", hex(addr), " size=", size, "\n")
		print("runtime: type=", toRType(typ).string(), "\n")
		dumpTypePointers(tp0)
		dumpTypePointers(tp1)
		for {
			var addr0, addr1 uintptr
			tp0, addr0 = tp0.next(addr + size)
			tp1, addr1 = tp1.next(addr + size)
			print("runtime: ", hex(addr0), " ", hex(addr1), "\n")
			if addr0 == 0 && addr1 == 0 {
				break
			}
		}
		throw("mismatch between typePointersOfType and typePointersOf")
	}
}

// dumpTypePointers prints a typePointers iterator's state for debugging.
func dumpTypePointers(tp typePointers) {
	print("runtime: tp.elem=", hex(tp.elem), " tp.typ=", unsafe.Pointer(tp.typ), "\n")
	print("runtime: tp.addr=", hex(tp.addr), " tp.mask=")
	for i := uintptr(0); i < ptrBits; i++ {
		if tp.mask&(uintptr(1)<<i) != 0 {
			print("1")
		} else {
			print("0")
		}
	}
	println()
}

// getgcmask returns the GC type info for the pointer stored in ep, for
// testing. The result has one byte per pointer-sized word: 1 for pointer, 0
// for scalar. If ep points to the stack, only static live information is
// returned (i.e. not for objects which are only dynamically live stack
// objects).
func getgcmask(ep any) (mask []byte) {
	e := *efaceOf(&ep)
	p := e.data
	t := e._type

	var et *_type
	if t.Kind_&kindMask != kindPtr {
		throw("bad argument to getgcmask: expected type to be a pointer to the value type whose mask is being queried")
	}
	et = (*ptrtype)(unsafe.Pointer(t)).Elem

	// data or bss
	for _, datap := range activeModules() {
		// data
		if datap.data <= uintptr(p) && uintptr(p) < datap.edata {
			bitmap := datap.gcdatamask.bytedata
			n := et.Size_
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				off := (uintptr(p) + i - datap.data) / goarch.PtrSize
				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}

		// bss
		if datap.bss <= uintptr(p) && uintptr(p) < datap.ebss {
			bitmap := datap.gcbssmask.bytedata
			n := et.Size_
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				off := (uintptr(p) + i - datap.bss) / goarch.PtrSize
				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}
	}

	// heap
	if base, s, _ := findObject(uintptr(p), 0, 0); base != 0 {
		if s.spanclass.noscan() {
			return nil
		}
		limit := base + s.elemsize

		// Move the base up to the iterator's start, because it may start
		// in the middle of the object (e.g. past a malloc header).
		tp := s.typePointersOfUnchecked(base)
		base = tp.addr

		// Unroll the full bitmap the GC would actually observe.
		maskFromHeap := make([]byte, (limit-base)/goarch.PtrSize)
		for {
			var addr uintptr
			if tp, addr = tp.next(limit); addr == 0 {
				break
			}
			maskFromHeap[(addr-base)/goarch.PtrSize] = 1
		}

		// Double-check that every part of the ptr/scalar data we're not
		// showing the caller is zeroed. This keeps us honest that this
		// information is actually irrelevant.
		for i := limit; i < s.elemsize; i++ {
			if *(*byte)(unsafe.Pointer(i)) != 0 {
				throw("found non-zeroed tail of allocation")
			}
		}

		// Callers (and a check we're about to run) expect this mask to
		// end at the last pointer.
		for len(maskFromHeap) > 0 && maskFromHeap[len(maskFromHeap)-1] == 0 {
			maskFromHeap = maskFromHeap[:len(maskFromHeap)-1]
		}

		if et.Kind_&kindGCProg == 0 {
			// Unroll again, but this time from the type information.
			maskFromType := make([]byte, (limit-base)/goarch.PtrSize)
			tp = s.typePointersOfType(et, base)
			for {
				var addr uintptr
				if tp, addr = tp.next(limit); addr == 0 {
					break
				}
				maskFromType[(addr-base)/goarch.PtrSize] = 1
			}

			// Validate that the prefix of maskFromType matches
			// maskFromHeap. Only the prefix is compared because
			// maskFromHeap was trimmed above: the allocation may have
			// been rounded up to a size class, so maskFromType can
			// have trailing scalar words that maskFromHeap no longer
			// carries.
			differs := false
			for i := range maskFromHeap {
				if maskFromHeap[i] != maskFromType[i] {
					differs = true
					break
				}
			}

			if differs {
				print("runtime: heap mask=")
				for _, b := range maskFromHeap {
					print(b)
				}
				println()
				print("runtime: type mask=")
				for _, b := range maskFromType {
					print(b)
				}
				println()
				print("runtime: type=", toRType(et).string(), "\n")
				throw("found two different masks from two different methods")
			}
		}

		// Select the heap mask to return. We may not have a type mask.
		mask = maskFromHeap

		// Make sure we keep ep alive. We may have stopped referencing
		// ep's data pointer sometime back in this function's body, and
		// it's possible for that memory to get freed.
		KeepAlive(ep)
		return
	}

	// stack
	if gp := getg(); gp.m.curg.stack.lo <= uintptr(p) && uintptr(p) < gp.m.curg.stack.hi {
		found := false
		var u unwinder
		for u.initAt(gp.m.curg.sched.pc, gp.m.curg.sched.sp, 0, gp.m.curg, 0); u.valid(); u.next() {
			if u.frame.sp <= uintptr(p) && uintptr(p) < u.frame.varp {
				found = true
				break
			}
		}
		if found {
			locals, _, _ := u.frame.getStackMap(false)
			if locals.n == 0 {
				return
			}
			size := uintptr(locals.n) * goarch.PtrSize
			n := (*ptrtype)(unsafe.Pointer(t)).Elem.Size_
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				off := (uintptr(p) + i - u.frame.varp + size) / goarch.PtrSize
				mask[i/goarch.PtrSize] = locals.ptrbit(off)
			}
		}
		return
	}

	// Otherwise, not something the GC knows about.
	// Possibly read-only data, like malloc(0).
	// Must not have pointers.
	return
}
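
// Hedged usage sketch (getgcmask is unexported and reachable only from
// runtime tests through an export shim; the type below is assumed for
// illustration): pass a pointer to the value whose mask you want, and read
// one byte per pointer-sized word:
//
//	type T struct {
//		p *int
//		n uintptr
//	}
//	mask := getgcmask(new(T))
//	// mask == []byte{1} on 64-bit: word 0 is a pointer, and the
//	// trailing scalar word is trimmed off the end of the mask.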

// userArenaHeapBitsSetType is the equivalent of heapSetType but for
// non-slice-backing-store Go values allocated in a user arena chunk. It
// sets up the type metadata for the value with type typ allocated at
// address ptr within the arena chunk described by s.
func userArenaHeapBitsSetType(typ *_type, ptr unsafe.Pointer, s *mspan) {
	base := s.base()
	h := s.writeUserArenaHeapBits(uintptr(ptr))

	p := typ.GCData // start of 1-bit pointer mask (or GC program)
	var gcProgBits uintptr
	if typ.Kind_&kindGCProg != 0 {
		// Expand the GC program, using the object itself for storage.
		gcProgBits = runGCProg(addb(p, 4), (*byte)(ptr))
		p = (*byte)(ptr)
	}
	nb := typ.PtrBytes / goarch.PtrSize

	for i := uintptr(0); i < nb; i += ptrBits {
		k := nb - i
		if k > ptrBits {
			k = ptrBits
		}
		// N.B. On big endian platforms we byte swap the data that we
		// read from GCData, which is always stored in little-endian
		// order by the compiler. writeUserArenaHeapBits handles data in
		// a platform-ordered way for efficiency, but stores it back in
		// little-endian order, since the bitmap is exposed through a
		// dummy type descriptor.
		h = h.write(s, readUintptr(addb(p, i/8)), k)
	}
	// Pad with explicit 0 bits for the pointerless tail of the object.
	h = h.pad(s, typ.Size_-typ.PtrBytes)
	h.flush(s, uintptr(ptr), typ.Size_)

	if typ.Kind_&kindGCProg != 0 {
		// Zero out the temporarily unrolled GC program.
		memclrNoHeapPointers(ptr, (gcProgBits+7)/8)
	}

	// Update the PtrBytes value in the type information. After this
	// point, the GC will observe the new bitmap.
	s.largeType.PtrBytes = uintptr(ptr) - base + typ.PtrBytes

	// Double-check that the bitmap was written out correctly.
	const doubleCheck = false
	if doubleCheck {
		doubleCheckHeapPointersInterior(uintptr(ptr), uintptr(ptr), typ.Size_, typ.Size_, typ, &s.largeType, s)
	}
}

// The declarations below are stubs for the !goexperiment.allocheaders heap
// bitmap API; shared code references these names, but they must never be
// called in this build.

func writeHeapBitsForAddr() {
	panic("not implemented")
}

type heapBits struct {
}

func heapBitsForAddr(addr, size uintptr) heapBits {
	panic("not implemented")
}

func (h heapBits) next() (heapBits, uintptr) {
	panic("not implemented")
}

func (h heapBits) nextFast() (heapBits, uintptr) {
	panic("not implemented")
}