...
package decoder

import (
    `sync`
    `unsafe`

    `github.com/bytedance/sonic/internal/caching`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
)

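// Capacity limits for the decoder's working stack and numeric scratch buffer.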
const (
    _MinSlice      = 2
    _MaxStack      = 4096
    _MaxStackBytes = _MaxStack * _PtrBytes
    _MaxDigitNums  = types.MaxDigitNums
)

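// _PtrBytes is the pointer size in bytes; the remaining constants give the
// byte offsets of the mm and dp fields inside _Stack and its total size.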
const (
    _PtrBytes   = _PTR_SIZE / 8
    _FsmOffset  = (_MaxStack + 1) * _PtrBytes
    _DbufOffset = _FsmOffset + int64(unsafe.Sizeof(types.StateMachine{})) + types.MAX_RECURSE * _PtrBytes
    _StackSize  = unsafe.Sizeof(_Stack{})
)

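// Shared state: stackPool recycles _Stack objects, valueCache / fieldCache
// keep frozen pointers reachable by the GC, and programCache memoizes
// compiled decoders per type.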
var (
    stackPool     = sync.Pool{}
    valueCache    = []unsafe.Pointer(nil)
    fieldCache    = []*caching.FieldMap(nil)
    fieldCacheMux = sync.Mutex{}
    programCache  = caching.CreateProgramCache()
)

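// _Stack is the scratch area used by a running decoder: a pointer stack
// (sp/sb), the native parser state machine (mm), per-recursion value slots
// (vp) and a digit buffer for number parsing (dp).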
type _Stack struct {
    sp uintptr
    sb [_MaxStack]unsafe.Pointer
    mm types.StateMachine
    vp [types.MAX_RECURSE]unsafe.Pointer
    dp [_MaxDigitNums]byte
}

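// _Decoder is the signature of a compiled decoding routine: it decodes from s
// starting at offset i into the value at vp, using sb as scratch space and fv
// as flag bits, and returns the new offset or an error.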
type _Decoder func(
    s string,
    i int,
    vp unsafe.Pointer,
    sb *_Stack,
    fv uint64,
    sv string,
    vk unsafe.Pointer,
) (int, error)

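// _KeepAlive mirrors the argument, return value and frame layout of a
// compiled decoder so that offsets and sizes can be taken from real Go types.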
var _KeepAlive struct {
    s  string
    i  int
    vp unsafe.Pointer
    sb *_Stack
    fv uint64
    sv string
    vk unsafe.Pointer

    ret int
    err error

    frame_decoder [_FP_offs]byte
    frame_generic [_VD_offs]byte
}

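// Pointer bitmaps for the decoder's arguments and locals, used when building
// the stack map of generated code.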
var (
    argPtrs   = []bool{true, false, false, true, true, false, true, false, true}
    localPtrs = []bool{}
)

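// Pointer bitmaps for the generic value-decoding routine.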
var (
    argPtrs_generic   = []bool{true}
    localPtrs_generic = []bool{}
)

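// newStack fetches a _Stack from the pool, allocating a new one if the pool
// is empty.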
func newStack() *_Stack {
    if ret := stackPool.Get(); ret == nil {
        return new(_Stack)
    } else {
        return ret.(*_Stack)
    }
}

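// resetStack zeroes the entire stack area before reuse.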
func resetStack(p *_Stack) {
    memclrNoHeapPointers(unsafe.Pointer(p), _StackSize)
}

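// freeStack clears the stack pointer and returns the stack to the pool.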
func freeStack(p *_Stack) {
    p.sp = 0
    stackPool.Put(p)
}

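// freezeValue pins v in valueCache so the GC keeps it alive, and returns its
// address as an integer.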
func freezeValue(v unsafe.Pointer) uintptr {
    valueCache = append(valueCache, v)
    return uintptr(v)
}

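// freezeFields pins a field map in fieldCache under fieldCacheMux and returns
// its address via referenceFields.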
func freezeFields(v *caching.FieldMap) int64 {
    fieldCacheMux.Lock()
    fieldCache = append(fieldCache, v)
    fieldCacheMux.Unlock()
    return referenceFields(v)
}

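// referenceFields converts a field map pointer into an integer address.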
func referenceFields(v *caching.FieldMap) int64 {
    return int64(uintptr(unsafe.Pointer(v)))
}

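// makeDecoder compiles a decoding program for vt and assembles it into a
// callable _Decoder.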
func makeDecoder(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
    if pp, err := newCompiler().compile(vt.Pack()); err != nil {
        return nil, err
    } else {
        return newAssembler(pp).Load(), nil
    }
}

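// findOrCompile returns the cached decoder for vt, compiling and caching a
// new one on first use.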
func findOrCompile(vt *rt.GoType) (_Decoder, error) {
    if val := programCache.Get(vt); val != nil {
        return val.(_Decoder), nil
    } else if ret, err := programCache.Compute(vt, makeDecoder); err == nil {
        return ret.(_Decoder), nil
    } else {
        return nil, err
    }
}