...
Source file
src/runtime/pinner.go
Documentation: runtime
1
2
3
4
5 package runtime
6
7 import (
8 "runtime/internal/atomic"
9 "unsafe"
10 )
11
12
13
14
// A Pinner is a set of Go objects each pinned to a fixed location in memory.
// The Pin method pins one object, while Unpin unpins all pinned objects of
// the Pinner.
type Pinner struct {
	*pinner
}
18
19
20
21
22
23
24
25
26
27
28
// Pin pins a Go object, preventing it from being moved or freed by the
// garbage collector until the Unpin method has been called.
//
// A pointer to a pinned object can be directly stored in C memory or can be
// contained in Go memory passed to C functions. If the pinned object itself
// contains pointers to Go objects, these objects must be pinned separately
// if they are going to be accessed from C code.
//
// The argument must be a pointer of any type or an unsafe.Pointer.
// It panics if the argument is nil, not a pointer, or points into a
// user arena chunk (see pinnerGetPtr).
func (p *Pinner) Pin(pointer any) {
	if p.pinner == nil {
		// Try to reuse a cached pinner from the current P before
		// allocating a new one.
		mp := acquirem()
		if pp := mp.p.ptr(); pp != nil {
			p.pinner = pp.pinnerCache
			pp.pinnerCache = nil
		}
		releasem(mp)

		if p.pinner == nil {
			// Cache was empty: allocate a fresh pinner backed by
			// its inline refStore array.
			p.pinner = new(pinner)
			p.refs = p.refStore[:0]

			// The finalizer is set once and never cleared, so a
			// pinner that goes back into the cache keeps its
			// finalizer and the relatively expensive SetFinalizer
			// call is not repeated on reuse. The finalizer must
			// therefore tolerate an empty pinner, which it does by
			// checking len(i.refs).
			SetFinalizer(p.pinner, func(i *pinner) {
				if len(i.refs) != 0 {
					i.unpin() // unpin before panicking so tests can recover and continue
					pinnerLeakPanic()
				}
			})
		}
	}
	ptr := pinnerGetPtr(&pointer)
	// setPinned reports false for non-heap pointers (statics), which need
	// no tracking because they are never moved or freed.
	if setPinned(ptr, true) {
		p.refs = append(p.refs, ptr)
	}
}
63
64
// Unpin unpins all pinned objects of the Pinner.
func (p *Pinner) Unpin() {
	p.pinner.unpin()

	// Return the (now empty) pinner to the current P's cache so a later
	// Pin can reuse it, but only if the cache slot is free.
	mp := acquirem()
	if pp := mp.p.ptr(); pp != nil && pp.pinnerCache == nil {
		pp.pinnerCache = p.pinner
		p.pinner = nil
	}
	releasem(mp)
}
79
const (
	// pinnerSize is the target total size in bytes of the pinner struct:
	// the refs slice header plus the inline refStore array.
	pinnerSize = 64
	// pinnerRefStoreSize is how many unsafe.Pointer slots fit in the
	// remaining space after the slice header, so small pin sets need no
	// extra allocation.
	pinnerRefStoreSize = (pinnerSize - unsafe.Sizeof([]unsafe.Pointer{})) / unsafe.Sizeof(unsafe.Pointer(nil))
)
84
// pinner is the heap-allocated backing state of a Pinner. refs holds one
// entry per successfully pinned heap pointer; it initially aliases the
// inline refStore array and only grows onto the heap past
// pinnerRefStoreSize entries.
type pinner struct {
	refs     []unsafe.Pointer
	refStore [pinnerRefStoreSize]unsafe.Pointer
}
89
// unpin unpins every pointer recorded in p.refs and resets p so it can be
// reused. Safe to call on a nil or never-used pinner.
func (p *pinner) unpin() {
	if p == nil || p.refs == nil {
		return
	}
	for i := range p.refs {
		setPinned(p.refs[i], false)
	}
	// The following two lines make all pointers to references in p.refs
	// unreachable, either by zeroing them in refStore or by dropping
	// refs' heap backing store (if it had grown beyond refStore).
	p.refStore = [pinnerRefStoreSize]unsafe.Pointer{}
	p.refs = p.refStore[:0]
}
103
// pinnerGetPtr validates the argument passed to Pinner.Pin and extracts the
// raw pointer from the interface. It panics if the value is nil, is not a
// pointer or unsafe.Pointer, or points into a user arena chunk (arena
// objects are freed explicitly and cannot be pinned).
func pinnerGetPtr(i *any) unsafe.Pointer {
	e := efaceOf(i)
	etyp := e._type
	if etyp == nil {
		panic(errorString("runtime.Pinner: argument is nil"))
	}
	if kind := etyp.Kind_ & kindMask; kind != kindPtr && kind != kindUnsafePointer {
		panic(errorString("runtime.Pinner: argument is not a pointer: " + toRType(etyp).string()))
	}
	if inUserArenaChunk(uintptr(e.data)) {
		// Arena-allocated objects are explicitly freed on arena release,
		// so pinning them is not allowed.
		panic(errorString("runtime.Pinner: object was allocated into an arena"))
	}
	return e.data
}
119
120
121
122
123
// isPinned checks if a Go pointer is pinned.
func isPinned(ptr unsafe.Pointer) bool {
	span := spanOfHeap(uintptr(ptr))
	if span == nil {
		// This function is only called for Go pointers, so this must be
		// a linker-allocated global object, which is never moved or
		// freed and thus effectively always pinned.
		return true
	}
	pinnerBits := span.getPinnerBits()
	// These pinnerBits might get unlinked by a concurrently running sweep,
	// but that's OK because gcBits don't get cleared until the following
	// GC cycle (nextMarkBitArenaEpoch).
	if pinnerBits == nil {
		return false
	}
	objIndex := span.objIndex(uintptr(ptr))
	pinState := pinnerBits.ofObject(objIndex)
	// Keep ptr alive until here so the span can't be freed while we read
	// its pinner bits.
	KeepAlive(ptr)
	return pinState.isPinned()
}
143
144
145
146
147
// setPinned marks or unmarks a Go pointer as pinned. It reports true if the
// pin state was changed and false if the pointer is not a heap pointer
// (statics need no pin tracking). Unpinning a non-Go pointer panics;
// unpinning an object that is not pinned throws.
func setPinned(ptr unsafe.Pointer, pin bool) bool {
	span := spanOfHeap(uintptr(ptr))
	if span == nil {
		if !pin {
			panic(errorString("tried to unpin non-Go pointer"))
		}
		// This is a linker-allocated, zero size object or other object
		// that is not in the heap. These objects don't need to be pinned
		// because they won't ever be moved or freed.
		return false
	}

	// Disable preemption: the span must not be swept (and its pinnerBits
	// possibly replaced) while we operate on it.
	mp := acquirem()
	span.ensureSwept()
	KeepAlive(ptr) // make sure ptr is still alive after span is swept

	objIndex := span.objIndex(uintptr(ptr))

	lock(&span.speciallock) // guards access to pinnerBits and the specials list

	pinnerBits := span.getPinnerBits()
	if pinnerBits == nil {
		// Lazily allocate the span's pinner bitmap on first pin.
		pinnerBits = span.newPinnerBits()
		span.setPinnerBits(pinnerBits)
	}
	pinState := pinnerBits.ofObject(objIndex)
	if pin {
		if pinState.isPinned() {
			// Object is already pinned: mark it as multi-pinned and
			// record the additional pin in a special pin counter.
			pinState.setMultiPinned(true)
			// Mark it as pinned by multiple pinners. The special's
			// allocation must happen on the system stack because the
			// caller may hold locks the allocator needs.
			systemstack(func() {
				offset := objIndex * span.elemsize
				span.incPinCounter(offset)
			})
		} else {
			// Set pin bit of object.
			pinState.setPinned(true)
		}
	} else {
		// Unpin the object.
		if pinState.isPinned() {
			if pinState.isMultiPinned() {
				var exists bool
				// Decrease the pin counter; if it reached zero, the
				// object is back to a single pin.
				systemstack(func() {
					offset := objIndex * span.elemsize
					exists = span.decPinCounter(offset)
				})
				if !exists {
					// Counter is gone: only one pin remains, so the
					// object is no longer multi-pinned.
					pinState.setMultiPinned(false)
				}
			} else {
				// Clear pin bit of object.
				pinState.setPinned(false)
			}
		} else {
			// Unpinning an object that is not pinned is a bug in the
			// caller's Pin/Unpin pairing.
			throw("runtime.Pinner: object already unpinned")
		}
	}
	unlock(&span.speciallock)
	releasem(mp)
	return true
}
216
// pinState is a snapshot of an object's pin bit (and, one position above
// it, its multipin bit): bytep points at the byte holding the bits, byteVal
// is the value read from it, and mask selects the object's pin bit. Reads
// use the cached byteVal; writes go through bytep atomically.
type pinState struct {
	bytep   *uint8
	byteVal uint8
	mask    uint8
}
222
223
224
225
226 func (v *pinState) isPinned() bool {
227 return (v.byteVal & v.mask) != 0
228 }
229
230 func (v *pinState) isMultiPinned() bool {
231 return (v.byteVal & (v.mask << 1)) != 0
232 }
233
// setPinned atomically sets or clears the object's pin bit in memory.
func (v *pinState) setPinned(val bool) {
	v.set(val, false)
}
237
// setMultiPinned atomically sets or clears the object's multipin bit in
// memory.
func (v *pinState) setMultiPinned(val bool) {
	v.set(val, true)
}
241
242
243
244 func (v *pinState) set(val bool, multipin bool) {
245 mask := v.mask
246 if multipin {
247 mask <<= 1
248 }
249 if val {
250 atomic.Or8(v.bytep, mask)
251 } else {
252 atomic.And8(v.bytep, ^mask)
253 }
254 }
255
256
// pinnerBits is the same type as gcBits but has different methods: it
// stores two bits per object in the span (pin bit and multipin bit).
type pinnerBits gcBits
258
259
260
261
262
// ofObject returns the pinState of the n'th object in the span. The byte
// holding the bits is read atomically once and cached in the returned
// pinState.
func (p *pinnerBits) ofObject(n uintptr) pinState {
	// Each object uses two adjacent bits, hence index n*2.
	bytep, mask := (*gcBits)(p).bitp(n * 2)
	byteVal := atomic.Load8(bytep)
	return pinState{bytep, byteVal, mask}
}
268
// pinnerBitSize returns the size of the span's pinner bitmap in bytes:
// two bits per object, rounded up to whole bytes.
func (s *mspan) pinnerBitSize() uintptr {
	return divRoundUp(uintptr(s.nelems)*2, 8)
}
272
273
274
275
// newPinnerBits returns a pointer to 8-byte aligned, zeroed memory large
// enough to hold two bits per object in the span, to be used for this
// span's pinner bits.
func (s *mspan) newPinnerBits() *pinnerBits {
	return (*pinnerBits)(newMarkBits(uintptr(s.nelems) * 2))
}
279
280
281
282
// getPinnerBits returns the pinner bits of the span, or nil if none have
// been allocated. The load is atomic because pinnerBits may be swapped by
// a concurrent sweep (see refreshPinnerBits).
func (s *mspan) getPinnerBits() *pinnerBits {
	return (*pinnerBits)(atomic.Loadp(unsafe.Pointer(&s.pinnerBits)))
}
286
// setPinnerBits atomically installs p (which may be nil) as the span's
// pinner bits.
func (s *mspan) setPinnerBits(p *pinnerBits) {
	atomicstorep(unsafe.Pointer(&s.pinnerBits), unsafe.Pointer(p))
}
290
291
292
293
// refreshPinnerBits replaces pinnerBits with a fresh copy for the next GC
// cycle. If the bitmap does not contain any set bits (no pinned objects),
// the span's pinnerBits is set to nil instead.
func (s *mspan) refreshPinnerBits() {
	p := s.getPinnerBits()
	if p == nil {
		return
	}

	hasPins := false
	bytes := alignUp(s.pinnerBitSize(), 8)

	// Iterate over each 8-byte chunk and check for pins. Note that
	// newPinnerBits guarantees that pinnerBits will be 8-byte aligned, so
	// we don't have to worry about edge cases, irrelevant bits will simply
	// be zero.
	for _, x := range unsafe.Slice((*uint64)(unsafe.Pointer(&p.x)), bytes/8) {
		if x != 0 {
			hasPins = true
			break
		}
	}

	if hasPins {
		// Copy the live bits into a fresh allocation for the next epoch;
		// the old gcBits memory is recycled by the GC.
		newPinnerBits := s.newPinnerBits()
		memmove(unsafe.Pointer(&newPinnerBits.x), unsafe.Pointer(&p.x), bytes)
		s.setPinnerBits(newPinnerBits)
	} else {
		s.setPinnerBits(nil)
	}
}
322
323
324
// incPinCounter is only called for multiple pins of the same object and
// records the _additional_ pins in a pin-counter special attached to the
// span. Caller must hold span.speciallock.
func (span *mspan) incPinCounter(offset uintptr) {
	var rec *specialPinCounter
	ref, exists := span.specialFindSplicePoint(offset, _KindSpecialPinCounter)
	if !exists {
		// No counter special for this object yet: allocate one and
		// splice it into the span's sorted specials list.
		lock(&mheap_.speciallock)
		rec = (*specialPinCounter)(mheap_.specialPinCounterAlloc.alloc())
		unlock(&mheap_.speciallock)
		// Splice in the record, fill in the fields.
		rec.special.offset = uint16(offset)
		rec.special.kind = _KindSpecialPinCounter
		rec.special.next = *ref
		*ref = (*special)(unsafe.Pointer(rec))
		spanHasSpecials(span)
	} else {
		rec = (*specialPinCounter)(unsafe.Pointer(*ref))
	}
	rec.counter++
}
343
344
345
// decPinCounter decreases the counter special of the object. It reports
// false if the counter reached zero (and the special was removed and
// freed), true otherwise. Caller must hold span.speciallock. Throws if no
// counter exists for the offset.
func (span *mspan) decPinCounter(offset uintptr) bool {
	ref, exists := span.specialFindSplicePoint(offset, _KindSpecialPinCounter)
	if !exists {
		throw("runtime.Pinner: decreased non-existing pin counter")
	}
	counter := (*specialPinCounter)(unsafe.Pointer(*ref))
	counter.counter--
	if counter.counter == 0 {
		// Unsplice the special from the list and return it to the
		// fixed allocator.
		*ref = counter.special.next
		if span.specials == nil {
			spanHasNoSpecials(span)
		}
		lock(&mheap_.speciallock)
		mheap_.specialPinCounterAlloc.free(unsafe.Pointer(counter))
		unlock(&mheap_.speciallock)
		return false
	}
	return true
}
365
366
// pinnerGetPinCounter returns a pointer to the pin counter of the object at
// addr, or nil if the object has no counter special. Only used by tests.
func pinnerGetPinCounter(addr unsafe.Pointer) *uintptr {
	_, span, objIndex := findObject(uintptr(addr), 0, 0)
	offset := objIndex * span.elemsize
	t, exists := span.specialFindSplicePoint(offset, _KindSpecialPinCounter)
	if !exists {
		return nil
	}
	counter := (*specialPinCounter)(unsafe.Pointer(*t))
	return &counter.counter
}
377
378
379
// pinnerLeakPanic panics when a pinner with pinned objects is garbage
// collected without Unpin having been called. It is a variable so tests
// can override it.
var pinnerLeakPanic = func() {
	panic(errorString("runtime.Pinner: found leaking pinned pointer; forgot to call Unpin()?"))
}
383
View as plain text