...
1
16
17 package caching
18
19 import (
20 `sync`
21 `sync/atomic`
22 `unsafe`
23
24 `github.com/bytedance/sonic/internal/rt`
25 )
26
27
28
const (
    _LoadFactor   = 0.5  // rehash when occupancy would exceed half the buckets
    _InitCapacity = 4096 // initial bucket count; must be a power of two (m is used as a bit mask)
)
33
// _ProgramMap is an immutable-by-convention, open-addressing hash map from
// Go type to compiled program. Readers access a snapshot without locks;
// writers mutate a private copy and publish it atomically (see ProgramCache).
type _ProgramMap struct {
    n uint64          // number of occupied slots (bumped atomically in insert)
    m uint32          // bucket mask: len(b) - 1, so len(b) is a power of two
    b []_ProgramEntry // bucket array probed linearly
}
39
// _ProgramEntry is a single bucket. A nil vt marks an empty slot, which is
// how probing in get/insert detects the end of a collision run.
type _ProgramEntry struct {
    vt *rt.GoType   // key: the Go type this entry was compiled for
    fn interface{}  // value: the compiled program (opaque to this package)
}
44
45 func newProgramMap() *_ProgramMap {
46 return &_ProgramMap {
47 n: 0,
48 m: _InitCapacity - 1,
49 b: make([]_ProgramEntry, _InitCapacity),
50 }
51 }
52
53 func (self *_ProgramMap) copy() *_ProgramMap {
54 fork := &_ProgramMap{
55 n: self.n,
56 m: self.m,
57 b: make([]_ProgramEntry, len(self.b)),
58 }
59 for i, f := range self.b {
60 fork.b[i] = f
61 }
62 return fork
63 }
64
65 func (self *_ProgramMap) get(vt *rt.GoType) interface{} {
66 i := self.m + 1
67 p := vt.Hash & self.m
68
69
70 for ; i > 0; i-- {
71 if b := self.b[p]; b.vt == vt {
72 return b.fn
73 } else if b.vt == nil {
74 break
75 } else {
76 p = (p + 1) & self.m
77 }
78 }
79
80
81 return nil
82 }
83
84 func (self *_ProgramMap) add(vt *rt.GoType, fn interface{}) *_ProgramMap {
85 p := self.copy()
86 f := float64(atomic.LoadUint64(&p.n) + 1) / float64(p.m + 1)
87
88
89 if f > _LoadFactor {
90 p = p.rehash()
91 }
92
93
94 p.insert(vt, fn)
95 return p
96 }
97
98 func (self *_ProgramMap) rehash() *_ProgramMap {
99 c := (self.m + 1) << 1
100 r := &_ProgramMap{m: c - 1, b: make([]_ProgramEntry, int(c))}
101
102
103 for i := uint32(0); i <= self.m; i++ {
104 if b := self.b[i]; b.vt != nil {
105 r.insert(b.vt, b.fn)
106 }
107 }
108
109
110 return r
111 }
112
113 func (self *_ProgramMap) insert(vt *rt.GoType, fn interface{}) {
114 h := vt.Hash
115 p := h & self.m
116
117
118 for i := uint32(0); i <= self.m; i++ {
119 if b := &self.b[p]; b.vt != nil {
120 p += 1
121 p &= self.m
122 } else {
123 b.vt = vt
124 b.fn = fn
125 atomic.AddUint64(&self.n, 1)
126 return
127 }
128 }
129
130
131 panic("no available slots")
132 }
133
134
135
// ProgramCache maps Go types to compiled programs. Reads are lock-free via
// an atomically loaded snapshot; writes serialize on the mutex and publish a
// new snapshot with an atomic pointer swap (copy-on-write).
type ProgramCache struct {
    m sync.Mutex     // serializes writers in Compute; Get never takes it
    p unsafe.Pointer // *_ProgramMap snapshot, read/replaced atomically
}
140
141 func CreateProgramCache() *ProgramCache {
142 return &ProgramCache {
143 m: sync.Mutex{},
144 p: unsafe.Pointer(newProgramMap()),
145 }
146 }
147
148 func (self *ProgramCache) Get(vt *rt.GoType) interface{} {
149 return (*_ProgramMap)(atomic.LoadPointer(&self.p)).get(vt)
150 }
151
152 func (self *ProgramCache) Compute(vt *rt.GoType, compute func(*rt.GoType, ... interface{}) (interface{}, error), ex ...interface{}) (interface{}, error) {
153 var err error
154 var val interface{}
155
156
157 self.m.Lock()
158 defer self.m.Unlock()
159
160
161 if val = self.Get(vt); val != nil {
162 return val, nil
163 }
164
165
166 if val, err = compute(vt, ex...); err != nil {
167 return nil, err
168 }
169
170
171 atomic.StorePointer(&self.p, unsafe.Pointer((*_ProgramMap)(atomic.LoadPointer(&self.p)).add(vt, val)))
172 return val, nil
173 }
174
View as plain text