package abi

import (
    `fmt`
    `reflect`

    . `github.com/chenzhuoyu/iasm/x86_64`
)

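/* zeroRegGo is the fixed zero register of the Go internal ABI on amd64: XMM15 must always hold zero in Go code. */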
const zeroRegGo = XMM15

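/* iregOrderGo lists the integer argument registers in Go internal ABI assignment order. */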
var iregOrderGo = [...]Register64 {
    RAX,
    RBX,
    RCX,
    RDI,
    RSI,
    R8,
    R9,
    R10,
    R11,
}

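/* xregOrderGo lists the SSE argument registers; XMM15 is excluded because it is the zero register. */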
var xregOrderGo = [...]XMMRegister {
    XMM0,
    XMM1,
    XMM2,
    XMM3,
    XMM4,
    XMM5,
    XMM6,
    XMM7,
    XMM8,
    XMM9,
    XMM10,
    XMM11,
    XMM12,
    XMM13,
    XMM14,
}

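/* ReservedRegs returns the registers the generated function must save and restore:
 * R14 (the current goroutine) and R15 (GOT reference) for Go calls, none for C calls. */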
func ReservedRegs(callc bool) []Register {
    if callc {
        return nil
    }
    return []Register {
        R14,
        R15,
    }
}

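/* stackAlloc tracks the running stack offset (s) and the next free integer (i) and SSE (x) register indices while assigning parameters. */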
type stackAlloc struct {
    s uint32
    i int
    x int
}

func (self *stackAlloc) reset() {
    self.i, self.x = 0, 0
}

func (self *stackAlloc) ireg(vt reflect.Type) (p Parameter) {
    p = mkIReg(vt, iregOrderGo[self.i])
    self.i++
    return
}

func (self *stackAlloc) xreg(vt reflect.Type) (p Parameter) {
    p = mkXReg(vt, xregOrderGo[self.x])
    self.x++
    return
}

func (self *stackAlloc) stack(vt reflect.Type) (p Parameter) {
    p = mkStack(vt, self.s)
    self.s += uint32(vt.Size())
    return
}

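/* spill aligns the current stack offset to a, then reserves n more bytes; it returns the new offset. */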
func (self *stackAlloc) spill(n uint32, a int) uint32 {
    self.s = alignUp(self.s, a) + n
    return self.s
}

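/* alloc assigns registers or stack slots for a single value of type vt, decomposing
 * composite types (strings, slices, interfaces) into their word-sized components. */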
func (self *stackAlloc) alloc(p []Parameter, vt reflect.Type) []Parameter {
    nb := vt.Size()
    vk := vt.Kind()

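    /* zero-sized objects take no space; record them at the current stack offset */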
    if nb == 0 {
        return append(p, mkStack(intType, self.s))
    }

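    /* dispatch on the value kind */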
    switch vk {
        case reflect.Bool          : return self.valloc(p, reflect.TypeOf(false))
        case reflect.Int           : return self.valloc(p, intType)
        case reflect.Int8          : return self.valloc(p, reflect.TypeOf(int8(0)))
        case reflect.Int16         : return self.valloc(p, reflect.TypeOf(int16(0)))
        case reflect.Int32         : return self.valloc(p, reflect.TypeOf(int32(0)))
        case reflect.Int64         : return self.valloc(p, reflect.TypeOf(int64(0)))
        case reflect.Uint          : return self.valloc(p, reflect.TypeOf(uint(0)))
        case reflect.Uint8         : return self.valloc(p, reflect.TypeOf(uint8(0)))
        case reflect.Uint16        : return self.valloc(p, reflect.TypeOf(uint16(0)))
        case reflect.Uint32        : return self.valloc(p, reflect.TypeOf(uint32(0)))
        case reflect.Uint64        : return self.valloc(p, reflect.TypeOf(uint64(0)))
        case reflect.Uintptr       : return self.valloc(p, reflect.TypeOf(uintptr(0)))
        case reflect.Float32       : return self.valloc(p, reflect.TypeOf(float32(0)))
        case reflect.Float64       : return self.valloc(p, reflect.TypeOf(float64(0)))
        case reflect.Complex64     : panic("abi: go117: not implemented: complex64")
        case reflect.Complex128    : panic("abi: go117: not implemented: complex128")
        case reflect.Array         : panic("abi: go117: not implemented: arrays")
        case reflect.Chan          : return self.valloc(p, reflect.TypeOf((chan int)(nil)))
        case reflect.Func          : return self.valloc(p, reflect.TypeOf((func())(nil)))
        case reflect.Map           : return self.valloc(p, reflect.TypeOf((map[int]int)(nil)))
        case reflect.Ptr           : return self.valloc(p, reflect.TypeOf((*int)(nil)))
        case reflect.UnsafePointer : return self.valloc(p, ptrType)
        case reflect.Interface     : return self.valloc(p, ptrType, ptrType)
        case reflect.Slice         : return self.valloc(p, ptrType, intType, intType)
        case reflect.String        : return self.valloc(p, ptrType, intType)
        case reflect.Struct        : panic("abi: go117: not implemented: structs")
        default                    : panic("abi: invalid value type")
    }
}

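/* valloc assigns each component to an SSE register, an integer register, or the stack, in that order of preference. */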
func (self *stackAlloc) valloc(p []Parameter, vts ...reflect.Type) []Parameter {
    for _, vt := range vts {
        enum := isFloat(vt)
        if enum != notFloatKind && self.x < len(xregOrderGo) {
            p = append(p, self.xreg(vt))
        } else if enum == notFloatKind && self.i < len(iregOrderGo) {
            p = append(p, self.ireg(vt))
        } else {
            p = append(p, self.stack(vt))
        }
    }
    return p
}

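/* NewFunctionLayout computes the register and stack layout of a function type
 * under the Go internal calling convention. */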
func NewFunctionLayout(ft reflect.Type) FunctionLayout {
    var sa stackAlloc
    var fn FunctionLayout

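    /* assign registers or stack slots to every argument */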
    for i := 0; i < ft.NumIn(); i++ {
        fn.Args = sa.alloc(fn.Args, ft.In(i))
    }

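    /* reset the register counters: return values are assigned registers independently of arguments */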
    sa.reset()

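    /* assign registers or stack slots to every return value */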
    for i := 0; i < ft.NumOut(); i++ {
        fn.Rets = sa.alloc(fn.Rets, ft.Out(i))
    }

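    /* align the stack offset before laying out the spill area */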
    sa.spill(0, PtrAlign)

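    /* reserve a pointer-sized spill slot for every register-assigned argument */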
    for i := 0; i < len(fn.Args); i++ {
        if fn.Args[i].InRegister {
            fn.Args[i].Mem = sa.spill(PtrSize, PtrAlign) - PtrSize
        }
    }

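    /* FP marks the end of the layout, aligned to a pointer boundary */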
    fn.FP = sa.spill(0, PtrAlign)
    return fn
}

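/* An informal example of the resulting layout: for a hypothetical signature
 *
 *     func(a int, s string) (f float64)
 *
 * the arguments are assigned as a -> RAX, s.ptr -> RBX, s.len -> RCX, the
 * return value as f -> XMM0, and three pointer-sized spill slots are reserved,
 * one for each register-assigned argument. */

/* emitExchangeArgs moves the argument values from their Go ABI registers into
 * the corresponding C ABI registers (iregOrderC / xregOrderC) before a C call. */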
func (self *Frame) emitExchangeArgs(p *Program) {
    iregArgs := make([]Parameter, 0, len(self.desc.Args))
    xregArgs := 0
    for _, v := range self.desc.Args {
        if v.InRegister {
            if v.IsFloat != notFloatKind {
                xregArgs += 1
            } else {
                iregArgs = append(iregArgs, v)
            }
        } else {
            panic("stack-assigned arguments are not supported yet")
        }
    }
    if xregArgs > len(xregOrderC) {
        panic("too many arguments, at most 8 floating-point register arguments are supported")
    }

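    /* with more than three integer arguments, a plain register-to-register copy
     * could overwrite a Go source register (e.g. RDI, the first C destination)
     * before it has been read, so the later arguments go through the stack */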
    switch len(iregArgs) {
        case 0, 1, 2, 3: {
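            /* three or fewer integer arguments: no overlap, move directly */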
            for i := 0; i < len(iregArgs); i++ {
                p.MOVQ(iregOrderGo[i], iregOrderC[i])
            }
        }
        case 4, 5, 6: {
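            /* stage the fourth and later arguments on the stack, move the first
             * three directly, then load the staged values into their C registers */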
            for i := 3; i < len(iregArgs); i++ {
                arg := iregArgs[i]
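                /* pointer arguments have already been spilled into the frame */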
                if !arg.IsPointer {
                    p.MOVQ(iregOrderGo[i], Ptr(RSP, int32(self.Prev() + arg.Mem)))
                }
            }
            p.MOVQ(iregOrderGo[0], iregOrderC[0])
            p.MOVQ(iregOrderGo[1], iregOrderC[1])
            p.MOVQ(iregOrderGo[2], iregOrderC[2])
            for i := 3; i < len(iregArgs); i++ {
                arg := iregArgs[i]
                p.MOVQ(Ptr(RSP, int32(self.Prev() + arg.Mem)), iregOrderC[i])
            }
        }
        default:
            panic("too many arguments, at most 6 integer register arguments are supported")
    }
}

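/* emitStackCheck compares the lowest stack address the call may touch against
 * g.stackguard0 (g is kept in R14 under the register ABI) and jumps to `to`
 * when the stack must grow. */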
func (self *Frame) emitStackCheck(p *Program, to *Label, maxStack uintptr) {
    p.LEAQ(Ptr(RSP, int32(-(self.Size() + uint32(maxStack)))), R12)
    p.CMPQ(Ptr(R14, _G_stackguard0), R12)
    p.JBE(to)
}

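/* StackCheckTextSize assembles a throwaway copy of the stack-check sequence to
 * measure its encoded size in bytes. */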
func (self *Frame) StackCheckTextSize() uint32 {
    p := DefaultArch.CreateProgram()
    p.LEAQ(Ptr(RSP, int32(-(self.Size()))), R12)
    p.CMPQ(Ptr(R14, _G_stackguard0), R12)
    to := CreateLabel("")
    p.Link(to)
    p.JBE(to)
    return uint32(len(p.Assemble(0)))
}

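/* emitExchangeRets moves the C return value (XMM0 for floats, RAX otherwise)
 * into the Go return location when the Go ABI assigns it to the stack. */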
func (self *Frame) emitExchangeRets(p *Program) {
    if len(self.desc.Rets) > 1 {
        panic("too many results, only a single result is supported now")
    }

    if len(self.desc.Rets) == 1 && !self.desc.Rets[0].InRegister {
        if self.desc.Rets[0].IsFloat == floatKind64 {
            p.MOVSD(xregOrderC[0], self.retv(0))
        } else if self.desc.Rets[0].IsFloat == floatKind32 {
            p.MOVSS(xregOrderC[0], self.retv(0))
        } else {
            p.MOVQ(RAX, self.retv(0))
        }
    }
}

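/* emitRestoreRegs reloads the reserved registers from their save slots and
 * re-zeroes the ABI zero register after the call. */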
func (self *Frame) emitRestoreRegs(p *Program) {
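    /* restore the reserved registers */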
    for i, r := range ReservedRegs(self.ccall) {
        switch r.(type) {
            case Register64:
                p.MOVQ(self.resv(i), r)
            case XMMRegister:
                p.MOVSD(self.resv(i), r)
            default:
                panic(fmt.Sprintf("unsupported register type %T to reserve", r))
        }
    }

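    /* re-zero XMM15, which the Go register ABI assumes is always zero */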
    p.XORPS(zeroRegGo, zeroRegGo)
}