// (license header elided in this extract)
19 package encoder
20
21 import (
22 `fmt`
23 `reflect`
24 `strconv`
25 `unsafe`
26
27 `github.com/bytedance/sonic/internal/cpu`
28 `github.com/bytedance/sonic/internal/jit`
29 `github.com/bytedance/sonic/internal/native/types`
30 `github.com/twitchyliquid64/golang-asm/obj`
31 `github.com/twitchyliquid64/golang-asm/obj/x86`
32
33 `github.com/bytedance/sonic/internal/native`
34 `github.com/bytedance/sonic/internal/rt`
35 )
36
37
55
56
67
/* bit positions kept in the _SP_f flags register */
const (
    _S_cond = iota      // condition flag set by _OP_cond_set, tested by _OP_cond_testc
    _S_init             // "first element" flag set by _OP_slice_len, cleared by _OP_slice_next
)

/* stack frame layout sizes, in bytes */
const (
    _FP_args = 48       // space reserved for outgoing call arguments
    _FP_fargs = 64      // scratch area below the register save area
    _FP_saves = 64      // register save area used by xsave / xload (up to 8 registers)
    _FP_locals = 24     // local variable slots (_VAR_sp / _VAR_dn / _VAR_vp)
)

/* derived frame offsets */
const (
    _FP_offs = _FP_fargs + _FP_saves + _FP_locals   // offset of the saved BP
    _FP_size = _FP_offs + 8                         // total frame size, including saved BP
    _FP_base = _FP_size + 8                         // first argument offset (past the return address)
)

/* IEEE-754 exponent masks, used to reject NaN / Infinity in _asm_OP_f32 / _asm_OP_f64 */
const (
    _FM_exp32 = 0x7f800000
    _FM_exp64 = 0x7ff0000000000000
)
90
/* little-endian immediate images of small JSON literals */
const (
    _IM_null = 0x6c6c756e   // "null"
    _IM_true = 0x65757274   // "true"
    _IM_fals = 0x736c6166   // "fals" (the trailing 'e' is stored separately)
    _IM_open = 0x00225c22   // bytes `"` `\` `"` NUL — opening sequence for double-quoted strings (only 3 bytes counted)
    _IM_array = 0x5d5b      // "[]"
    _IM_object = 0x7d7b     // "{}"
    _IM_mulv = -0x5555555555555555  // magic multiplier used for the size division in _asm_OP_bin
)

/* labels of the shared buffer-growing subroutine */
const (
    _LB_more_space = "_more_space"
    _LB_more_space_return = "_more_space_return_"   // suffixed with a per-call-site counter (see check_size_rl)
)

/* labels of the shared error / panic exits */
const (
    _LB_error = "_error"
    _LB_error_too_deep = "_error_too_deep"
    _LB_error_invalid_number = "_error_invalid_number"
    _LB_error_nan_or_infinite = "_error_nan_or_infinite"
    _LB_panic = "_panic"
)
113
/* general purpose scratch registers */
var (
    _AX = jit.Reg("AX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
)

/* vector registers, used for 16-byte moves and zeroing */
var (
    _X0 = jit.Reg("X0")
    _Y0 = jit.Reg("Y0")
)

/* output buffer registers (see load_buffer / save_buffer):
 * _ST: state stack base, _RP: buffer pointer, _RL: length, _RC: capacity */
var (
    _ST = jit.Reg("BX")
    _RP = jit.Reg("DI")
    _RL = jit.Reg("SI")
    _RC = jit.Reg("DX")
)

/* _LR: return address for the more_space subroutine (must be R9, see
 * check_size_rl); _ET / _EP: error interface type & value */
var (
    _LR = jit.Reg("R9")
    _R10 = jit.Reg("R10")
    _ET = jit.Reg("R10")
    _EP = jit.Reg("R11")
)

/* encoder iteration state:
 * _SP_p: pointer to the value being encoded
 * _SP_q: map iterator pointer
 * _SP_x: remaining element count for slices
 * _SP_f: iteration flags (_S_cond / _S_init) */
var (
    _SP_p = jit.Reg("R12")
    _SP_q = jit.Reg("R13")
    _SP_x = jit.Reg("R14")
    _SP_f = jit.Reg("R15")
)

/* incoming arguments on the stack */
var (
    _ARG_rb = jit.Ptr(_SP, _FP_base)        // output buffer header (ptr at +0, len at +8, cap at +16)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 8)    // pointer to the root value
    _ARG_sb = jit.Ptr(_SP, _FP_base + 16)   // state stack base
    _ARG_fv = jit.Ptr(_SP, _FP_base + 24)   // encoding flag bits
)

/* error return slots */
var (
    _RET_et = jit.Ptr(_SP, _FP_base + 32)   // error interface type
    _RET_ep = jit.Ptr(_SP, _FP_base + 40)   // error interface value
)

/* frame-local variables */
var (
    _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves)        // source bytes consumed so far (encode_string)
    _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)    // remaining destination space (encode_string)
    _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)   // spilled value pointer (_asm_OP_recurse)
)

/* register sets preserved around each kind of call */
var (
    _REG_ffi = []obj.Addr{_RP, _RL, _RC}                                    // around native (C ABI) calls
    _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}             // around encoder calls
    _REG_jsr = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}             // around the growslice subroutine
    _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}   // around arbitrary Go calls
)
175
// _Assembler translates a compiled encoder _Program into machine code.
type _Assembler struct {
    jit.BaseAssembler
    p _Program      // instruction sequence to assemble
    x int           // counter for generating unique more_space return labels
    name string     // suffix of the generated function name ("encode_" + name)
}
182
183 func newAssembler(p _Program) *_Assembler {
184 return new(_Assembler).Init(p)
185 }
186
187
188 func (self *_Assembler) Load() _Encoder {
189 return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
190 }
191
// Init binds the program to the assembler and registers compile as the
// assembly callback.
func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}

// compile emits the whole function: the entry code, the translated program,
// the shared exit path, and the out-of-line helper routines.
func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.builtins()
}
204
205
206
/* _OpFuncTab maps each opcode to its assembler routine; nil entries are
 * invalid opcodes, on which instr panics. */
var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
    _OP_null : (*_Assembler)._asm_OP_null,
    _OP_empty_arr : (*_Assembler)._asm_OP_empty_arr,
    _OP_empty_obj : (*_Assembler)._asm_OP_empty_obj,
    _OP_bool : (*_Assembler)._asm_OP_bool,
    _OP_i8 : (*_Assembler)._asm_OP_i8,
    _OP_i16 : (*_Assembler)._asm_OP_i16,
    _OP_i32 : (*_Assembler)._asm_OP_i32,
    _OP_i64 : (*_Assembler)._asm_OP_i64,
    _OP_u8 : (*_Assembler)._asm_OP_u8,
    _OP_u16 : (*_Assembler)._asm_OP_u16,
    _OP_u32 : (*_Assembler)._asm_OP_u32,
    _OP_u64 : (*_Assembler)._asm_OP_u64,
    _OP_f32 : (*_Assembler)._asm_OP_f32,
    _OP_f64 : (*_Assembler)._asm_OP_f64,
    _OP_str : (*_Assembler)._asm_OP_str,
    _OP_bin : (*_Assembler)._asm_OP_bin,
    _OP_quote : (*_Assembler)._asm_OP_quote,
    _OP_number : (*_Assembler)._asm_OP_number,
    _OP_eface : (*_Assembler)._asm_OP_eface,
    _OP_iface : (*_Assembler)._asm_OP_iface,
    _OP_byte : (*_Assembler)._asm_OP_byte,
    _OP_text : (*_Assembler)._asm_OP_text,
    _OP_deref : (*_Assembler)._asm_OP_deref,
    _OP_index : (*_Assembler)._asm_OP_index,
    _OP_load : (*_Assembler)._asm_OP_load,
    _OP_save : (*_Assembler)._asm_OP_save,
    _OP_drop : (*_Assembler)._asm_OP_drop,
    _OP_drop_2 : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse : (*_Assembler)._asm_OP_recurse,
    _OP_is_nil : (*_Assembler)._asm_OP_is_nil,
    _OP_is_nil_p1 : (*_Assembler)._asm_OP_is_nil_p1,
    _OP_is_zero_1 : (*_Assembler)._asm_OP_is_zero_1,
    _OP_is_zero_2 : (*_Assembler)._asm_OP_is_zero_2,
    _OP_is_zero_4 : (*_Assembler)._asm_OP_is_zero_4,
    _OP_is_zero_8 : (*_Assembler)._asm_OP_is_zero_8,
    _OP_is_zero_map : (*_Assembler)._asm_OP_is_zero_map,
    _OP_goto : (*_Assembler)._asm_OP_goto,
    _OP_map_iter : (*_Assembler)._asm_OP_map_iter,
    _OP_map_stop : (*_Assembler)._asm_OP_map_stop,
    _OP_map_check_key : (*_Assembler)._asm_OP_map_check_key,
    _OP_map_write_key : (*_Assembler)._asm_OP_map_write_key,
    _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next,
    _OP_slice_len : (*_Assembler)._asm_OP_slice_len,
    _OP_slice_next : (*_Assembler)._asm_OP_slice_next,
    _OP_marshal : (*_Assembler)._asm_OP_marshal,
    _OP_marshal_p : (*_Assembler)._asm_OP_marshal_p,
    _OP_marshal_text : (*_Assembler)._asm_OP_marshal_text,
    _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p,
    _OP_cond_set : (*_Assembler)._asm_OP_cond_set,
    _OP_cond_testc : (*_Assembler)._asm_OP_cond_testc,
}
259
260 func (self *_Assembler) instr(v *_Instr) {
261 if fn := _OpFuncTab[v.op()]; fn != nil {
262 fn(self, v)
263 } else {
264 panic(fmt.Sprintf("invalid opcode: %d", v.op()))
265 }
266 }
267
268 func (self *_Assembler) instrs() {
269 for i, v := range self.p {
270 self.Mark(i)
271 self.instr(&v)
272 self.debug_instr(i, &v)
273 }
274 }
275
// builtins emits the shared out-of-line routines that program code jumps
// to: buffer growth, the error exits, and the panic shim.
func (self *_Assembler) builtins() {
    self.more_space()
    self.error_too_deep()
    self.error_invalid_number()
    self.error_nan_or_infinite()
    self.go_panic()
}

// epilogue emits the common function exit. The fall-through path clears
// the error registers; error paths jump to _LB_error with ET / EP already
// set. Either way the final buffer length is flushed, the error values are
// stored in the return slots, and the frame is popped.
func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))                          // terminal pc, target of jumps past the last instruction
    self.Emit("XORL", _ET, _ET)                     // success: nil error type
    self.Emit("XORL", _EP, _EP)                     // success: nil error value
    self.Link(_LB_error)                            // error paths join here
    self.Emit("MOVQ", _ARG_rb, _AX)
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))         // write back the final buffer length
    self.Emit("MOVQ", _ET, _RET_et)
    self.Emit("MOVQ", _EP, _RET_ep)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)  // restore BP
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)       // pop the frame
    self.Emit("RET")
}

// prologue emits the function entry: allocates the frame, saves BP, loads
// the buffer registers and arguments, and clears the iteration state.
func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)
    self.load_buffer()                              // RP / RL / RC from *rb
    self.Emit("MOVQ", _ARG_vp, _SP_p)               // current value pointer
    self.Emit("MOVQ", _ARG_sb, _ST)                 // state stack base
    self.Emit("XORL", _SP_x, _SP_x)
    self.Emit("XORL", _SP_f, _SP_f)
    self.Emit("XORL", _SP_q, _SP_q)
}
309
310
311
312 func (self *_Assembler) xsave(reg ...obj.Addr) {
313 for i, v := range reg {
314 if i > _FP_saves / 8 - 1 {
315 panic("too many registers to save")
316 } else {
317 self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
318 }
319 }
320 }
321
322 func (self *_Assembler) xload(reg ...obj.Addr) {
323 for i, v := range reg {
324 if i > _FP_saves / 8 - 1 {
325 panic("too many registers to load")
326 } else {
327 self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
328 }
329 }
330 }
331
332 func (self *_Assembler) rbuf_di() {
333 if _RP.Reg != x86.REG_DI {
334 panic("register allocation messed up: RP != DI")
335 } else {
336 self.Emit("ADDQ", _RL, _RP)
337 }
338 }
339
// store_int encodes an integer in place: reserve nd output bytes, load the
// value (sign- or zero-extended by ins) into SI, and call the native itoa
// routine fn, which leaves the number of bytes written in AX.
func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
    self.check_size(nd)
    self.save_c()
    self.rbuf_di()                              // DI = write cursor
    self.Emit(ins, jit.Ptr(_SP_p, 0), _SI)      // SI = integer value
    self.call_c(fn)
    self.Emit("ADDQ", _AX, _RL)                 // advance by bytes produced
}

// store_str emits inline stores that copy the literal s to the current
// write position (RP + RL), in 8/4/2/1-byte chunks. It neither advances RL
// nor checks capacity — callers do both.
func (self *_Assembler) store_str(s string) {
    i := 0
    m := rt.Str2Mem(s)

    /* 8-byte chunks */
    for i <= len(m) - 8 {
        self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)
        self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i)))
        i += 8
    }

    /* remaining 4 bytes */
    if i <= len(m) - 4 {
        self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))
        i += 4
    }

    /* remaining 2 bytes */
    if i <= len(m) - 2 {
        self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))
        i += 2
    }

    /* last byte, if any */
    if i < len(m) {
        self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i)))
    }
}
377
378 func (self *_Assembler) check_size(n int) {
379 self.check_size_rl(jit.Ptr(_RL, int64(n)))
380 }
381
382 func (self *_Assembler) check_size_r(r obj.Addr, d int) {
383 self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
384 }
385
// check_size_rl compares the required end position v (an effective address
// based on RL) against the capacity RC, and invokes the buffer-growing
// subroutine when it does not fit. Each call site gets a unique return
// label derived from the counter self.x.
func (self *_Assembler) check_size_rl(v obj.Addr) {
    idx := self.x
    key := _LB_more_space_return + strconv.Itoa(idx)

    /* the grow subroutine returns through R9, so LR must be R9 */
    if _LR.Reg != x86.REG_R9 {
        panic("register allocation messed up: LR != R9")
    }

    /* check for buffer capacity */
    self.x++
    self.Emit("LEAQ", v, _AX)       // AX = required length
    self.Emit("CMPQ", _AX, _RC)
    self.Sjmp("JBE" , key)          // fits — skip the grow call
    self.slice_grow_ax(key)
    self.Link(key)
}

// slice_grow_ax jumps to the shared _LB_more_space routine with the return
// label's address in LR (R9) and the requested capacity still in AX.
// The raw bytes 4c 8d 0d encode "LEAQ rel32(PC), R9".
func (self *_Assembler) slice_grow_ax(ret string) {
    self.Byte(0x4c, 0x8d, 0x0d)         // LEAQ ?(PC), R9
    self.Sref(ret, 4)                   // rel32 displacement to the return label
    self.Sjmp("JMP" , _LB_more_space)
}
409
410
411
const (
    _StateSize = int64(unsafe.Sizeof(_State{}))     // bytes per saved recursion state
    _StackLimit = _MaxStack * _StateSize            // maximum state stack offset before "too deep"
)

// save_state pushes the current iteration state (SP_x, SP_f, SP_p, SP_q)
// onto the state stack at *ST; the word at ST+0 holds the current top
// offset. Overflow jumps to the "too deep" error exit.
func (self *_Assembler) save_state() {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)             // CX = current top offset
    self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R8)    // R8 = top after the push
    self.Emit("CMPQ", _R8, jit.Imm(_StackLimit))
    self.Sjmp("JAE" , _LB_error_too_deep)
    self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))
    self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16))
    self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))   // pointer slots go through WritePtr (GC visibility)
    self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))
    self.Emit("MOVQ", _R8, jit.Ptr(_ST, 0))             // commit the new top
}

// drop_state pops decr bytes of saved state: restores the four state
// registers from the new top, then zeroes the vacated slots so the GC does
// not see stale pointers.
func (self *_Assembler) drop_state(decr int64) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)
    self.Emit("SUBQ" , jit.Imm(decr), _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q)
    self.Emit("PXOR" , _X0, _X0)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))    // clear the x / f slots
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))   // clear the p / q slots
}
441
442
443
// add_char stores a single byte at the write position and advances RL.
// Capacity must have been checked by the caller.
func (self *_Assembler) add_char(ch byte) {
    self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(1), _RL)
}

// add_long stores a 4-byte immediate at the write position but advances RL
// by only n bytes (used for _IM_open, whose 4th byte is NUL padding).
func (self *_Assembler) add_long(ch uint32, n int64) {
    self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(n), _RL)
}

// add_text copies the literal ss to the write position and advances RL by
// its length. Capacity must have been checked by the caller.
func (self *_Assembler) add_text(ss string) {
    self.store_str(ss)
    self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL)
}

// prep_buffer flushes RL into the buffer header and passes the header
// pointer as the first Go call argument at 0(SP).
func (self *_Assembler) prep_buffer() {
    self.Emit("MOVQ", _ARG_rb, _AX)
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))
}

// prep_buffer_c does the same for a native call: header pointer in DI with
// the length flushed.
func (self *_Assembler) prep_buffer_c() {
    self.Emit("MOVQ", _ARG_rb, _DI)
    self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8))
}

// save_buffer writes the buffer triple (RP / RL / RC) back into *rb.
func (self *_Assembler) save_buffer() {
    self.Emit("MOVQ", _ARG_rb, _CX)
    self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0))
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))
    self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16))
}

// load_buffer loads the buffer triple (ptr / len / cap) from *rb into
// RP / RL / RC.
func (self *_Assembler) load_buffer() {
    self.Emit("MOVQ", _ARG_rb, _AX)
    self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)
}
483
484
485
// call emits an indirect call to the absolute address pc via AX.
func (self *_Assembler) call(pc obj.Addr) {
    self.Emit("MOVQ", pc, _AX)
    self.Rjmp("CALL", _AX)
}

// save_c spills the buffer registers that a native call may clobber.
func (self *_Assembler) save_c() {
    self.xsave(_REG_ffi...)
}

// call_c calls a native (C ABI) routine and restores the buffer registers;
// the caller must have invoked save_c beforehand.
func (self *_Assembler) call_c(pc obj.Addr) {
    self.call(pc)
    self.xload(_REG_ffi...)
}

// call_go calls a Go function, preserving every JIT-allocated register
// across the call.
func (self *_Assembler) call_go(pc obj.Addr) {
    self.xsave(_REG_all...)
    self.call(pc)
    self.xload(_REG_all...)
}

// call_encoder calls an encoder function, then reloads the buffer
// registers because the callee may have grown (reallocated) the buffer.
func (self *_Assembler) call_encoder(pc obj.Addr) {
    self.xsave(_REG_enc...)
    self.call(pc)
    self.xload(_REG_enc...)
    self.load_buffer()
}
512
513 func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
514 switch vt.Kind() {
515 case reflect.Interface : self.call_marshaler_i(fn, it)
516 case reflect.Ptr, reflect.Map: self.call_marshaler_v(fn, it, vt, true)
517
518 default : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
519 }
520 }
521
// call_marshaler_i encodes a value held in an interface: it asserts the
// value to the marshaler interface it, then invokes the wrapper fn.
// A nil interface encodes as "null".
func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
    self.Emit("MOVQ" , jit.Gtype(it), _AX)      // target interface type
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // source interface: type word
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)  // source interface: data word
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JZ" , "_null_{n}")               // nil interface -> "null"
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16))
    self.call_go(_F_assertI2I)                  // convert to the marshaler interface
    self.prep_buffer()
    self.Emit("MOVOU", jit.Ptr(_SP, 24), _X0)   // asserted interface -> call arguments
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24))    // pass the encoding flags through
    self.call_encoder(fn)
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)                // propagate marshaler errors
    self.Sjmp("JMP" , "_done_{n}")
    self.Link("_null_{n}")
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(4), _RL)
    self.Link("_done_{n}")
}
549
// call_marshaler_v encodes a concrete value through the marshaler wrapper
// fn, building the interface from the itab of (it, vt). When deref is set,
// the word at *SP_p is passed as the receiver; otherwise SP_p itself is.
func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
    self.prep_buffer()
    self.Emit("MOVQ", jit.Itab(it, vt), _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))

    /* receiver word: SP_p itself, or the pointer it holds */
    if !deref {
        self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 16))
    } else {
        self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)
        self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
    }

    /* pass the encoding flags and invoke the wrapper */
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24))
    self.call_encoder(fn)
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)                // propagate marshaler errors
}
572
573
574
/* runtime helpers for growing the output buffer */
var (
    _T_byte = jit.Type(byteType)        // element type passed to growslice
    _F_growslice = jit.Func(growslice)
)
579
580 func (self *_Assembler) more_space() {
581 self.Link(_LB_more_space)
582 self.Emit("MOVQ", _T_byte, _AX)
583 self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))
584 self.Emit("MOVQ", _RP, jit.Ptr(_SP, 8))
585 self.Emit("MOVQ", _RL, jit.Ptr(_SP, 16))
586 self.Emit("MOVQ", _RC, jit.Ptr(_SP, 24))
587 self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32))
588 self.xsave(_REG_jsr...)
589 self.call(_F_growslice)
590 self.xload(_REG_jsr...)
591 self.Emit("MOVQ", jit.Ptr(_SP, 40), _RP)
592 self.Emit("MOVQ", jit.Ptr(_SP, 48), _RL)
593 self.Emit("MOVQ", jit.Ptr(_SP, 56), _RC)
594 self.save_buffer()
595 self.Rjmp("JMP" , _LR)
596 }
597
598
599
/* pre-boxed error values and the UnsupportedValueError itab */
var (
    _V_ERR_too_deep = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
    _V_ERR_nan_or_infinite = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
    _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
)

// error_too_deep is the exit for state-stack overflow: it loads the
// pre-boxed "too deep" error into ET / EP and joins the error epilogue.
func (self *_Assembler) error_too_deep() {
    self.Link(_LB_error_too_deep)
    self.Emit("MOVQ", _V_ERR_too_deep, _EP)
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)
    self.Sjmp("JMP" , _LB_error)
}

// error_invalid_number is the exit for malformed json.Number values. The
// offending string is expected at 0(SP)/8(SP) (stored by _asm_OP_number);
// error_number builds the error returned at 16(SP)/24(SP).
func (self *_Assembler) error_invalid_number() {
    self.Link(_LB_error_invalid_number)
    self.call_go(_F_error_number)
    self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET)
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP)
    self.Sjmp("JMP" , _LB_error)
}

// error_nan_or_infinite is the exit for non-finite floats.
func (self *_Assembler) error_nan_or_infinite() {
    self.Link(_LB_error_nan_or_infinite)
    self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)
    self.Sjmp("JMP" , _LB_error)
}

/* native quote subroutine and the Go panic shim */
var (
    _F_quote = jit.Imm(int64(native.S_quote))
    _F_panic = jit.Func(goPanic)
)

// go_panic raises a Go panic; the panic code is expected at 0(SP) (stored
// by the jumping site) and the current value pointer is passed at 8(SP).
func (self *_Assembler) go_panic() {
    self.Link(_LB_panic)
    self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 8))
    self.call_go(_F_panic)
}
640
// encode_string encodes the string header at *SP_p, escaping via the
// native quote routine. doubleQuote selects the `\"`-quoted form used for
// the `,string` option. Empty strings short-circuit; a non-empty string
// with a nil data pointer panics (corrupted string header).
func (self *_Assembler) encode_string(doubleQuote bool) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)  // AX = length
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JZ" , "_str_empty_{n}")
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JNE" , "_str_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
    self.Sjmp("JMP", _LB_panic)
    self.Link("_str_next_{n}")

    /* opening quote(s), with room for the closing ones reserved up front */
    if !doubleQuote {
        self.check_size_r(_AX, 2)
        self.add_char('"')
    } else {
        self.check_size_r(_AX, 6)
        self.add_long(_IM_open, 3)
    }

    /* quoting loop; _VAR_sp tracks source bytes consumed across retries */
    self.Emit("XORL", _AX, _AX)
    self.Emit("MOVQ", _AX, _VAR_sp)
    self.Link("_str_loop_{n}")
    self.save_c()

    /* set up the native call: dn = remaining buffer space (passed by
     * pointer in CX), DX = write cursor, DI/SI = unconsumed source */
    self.Emit("MOVQ", _RC, _CX)
    self.Emit("SUBQ", _RL, _CX)
    self.Emit("MOVQ", _CX, _VAR_dn)
    self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX)
    self.Emit("LEAQ", _VAR_dn, _CX)
    self.Emit("MOVQ", _VAR_sp, _AX)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)
    self.Emit("ADDQ", _AX, _DI)                 // DI = src + consumed
    self.Emit("SUBQ", _AX, _SI)                 // SI = len - consumed

    /* quote flags in R8 */
    if !doubleQuote {
        self.Emit("XORL", _R8, _R8)
    } else {
        self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)
    }

    /* call the native quote routine; it updates *dn to the bytes written,
     * and a negative AX signals the destination ran out of space */
    self.call_c(_F_quote)
    self.Emit("ADDQ" , _VAR_dn, _RL)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , "_str_space_{n}")

    /* closing quote(s) */
    if !doubleQuote {
        self.check_size(1)
        self.add_char('"')
        self.Sjmp("JMP", "_str_end_{n}")
    } else {
        self.check_size(3)
        self.add_text("\\\"\"")
        self.Sjmp("JMP", "_str_end_{n}")
    }

    /* out of space: record progress (~AX = bytes consumed), grow to twice
     * the current capacity, and retry the loop */
    self.Link("_str_space_{n}")
    self.Emit("NOTQ", _AX)
    self.Emit("ADDQ", _AX, _VAR_sp)
    self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX)     // AX = RC * 2
    self.slice_grow_ax("_str_loop_{n}")

    /* empty-string fast paths */
    if !doubleQuote {
        self.Link("_str_empty_{n}")
        self.check_size(2)
        self.add_text("\"\"")
        self.Link("_str_end_{n}")
    } else {
        self.Link("_str_empty_{n}")
        self.check_size(6)
        self.add_text("\"\\\"\\\"\"")
        self.Link("_str_end_{n}")
    }
}
723
724
725
/* marshaler interface types */
var (
    _T_json_Marshaler = rt.UnpackType(jsonMarshalerType)
    _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
)

/* native (C ABI) subroutine addresses */
var (
    _F_f64toa = jit.Imm(int64(native.S_f64toa))
    _F_f32toa = jit.Imm(int64(native.S_f32toa))
    _F_i64toa = jit.Imm(int64(native.S_i64toa))
    _F_u64toa = jit.Imm(int64(native.S_u64toa))
    _F_b64encode = jit.Imm(int64(_subr__b64encode))
)

/* Go helper functions */
var (
    _F_memmove = jit.Func(memmove)
    _F_error_number = jit.Func(error_number)
    _F_isValidNumber = jit.Func(isValidNumber)
)

/* sorted map iterator helpers */
var (
    _F_iteratorStop = jit.Func(iteratorStop)
    _F_iteratorNext = jit.Func(iteratorNext)
    _F_iteratorStart = jit.Func(iteratorStart)
)

/* encoder entry points, resolved in init (presumably to break an
 * initialization cycle — they live in this package) */
var (
    _F_encodeTypedPointer obj.Addr
    _F_encodeJsonMarshaler obj.Addr
    _F_encodeTextMarshaler obj.Addr
)

const (
    _MODE_AVX2 = 1 << 2     // mode flag passed to b64encode when AVX2 is available
)

func init() {
    _F_encodeTypedPointer = jit.Func(encodeTypedPointer)
    _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
    _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
}
766
// _asm_OP_null writes the literal "null".
func (self *_Assembler) _asm_OP_null(_ *_Instr) {
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(4), _RL)
}

// _asm_OP_empty_arr writes "null" for a nil slice, or "[]" when the
// NoNullSliceOrMap flag is set.
func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_arr_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_arr_end_{n}")
    self.Link("_empty_arr_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_arr_end_{n}")
}

// _asm_OP_empty_obj writes "null" for a nil map, or "{}" when the
// NoNullSliceOrMap flag is set.
func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_obj_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_obj_end_{n}")
    self.Link("_empty_obj_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_obj_end_{n}")
}

// _asm_OP_bool writes "true" or "false" from the byte at *SP_p.
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JE" , "_false_{n}")
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(4), _RL)
    self.Sjmp("JMP" , "_end_{n}")
    self.Link("_false_{n}")
    self.check_size(5)
    self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0))       // "fals"
    self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))            // trailing 'e'
    self.Emit("ADDQ", jit.Imm(5), _RL)
    self.Link("_end_{n}")
}
811
/* integer encoders: each reserves a worst-case digit count, widens the
 * value into SI with the given MOV variant, and calls the native itoa. */

func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    self.store_int(4, _F_i64toa, "MOVBQSX")
}

func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    self.store_int(6, _F_i64toa, "MOVWQSX")
}

func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    self.store_int(17, _F_i64toa, "MOVLQSX")
}

func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    self.store_int(21, _F_i64toa, "MOVQ")
}

func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    self.store_int(3, _F_u64toa, "MOVBQZX")
}

func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    self.store_int(5, _F_u64toa, "MOVWQZX")
}

func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    self.store_int(16, _F_u64toa, "MOVLQZX")
}

func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    self.store_int(20, _F_u64toa, "MOVQ")
}
843
// _asm_OP_f32 encodes a float32: rejects NaN / Inf by testing the exponent
// bits (AND then XOR with the mask leaves zero exactly when the exponent
// is all ones), then calls the native f32toa with the value in X0.
func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX)
    self.Emit("XORL" , jit.Imm(_FM_exp32), _AX)
    self.Sjmp("JZ" , _LB_error_nan_or_infinite)     // exponent all ones -> NaN / Inf
    self.save_c()
    self.rbuf_di()
    self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0)
    self.call_c(_F_f32toa)
    self.Emit("ADDQ" , _AX, _RL)                    // advance by bytes produced
}

// _asm_OP_f64 encodes a float64 the same way, with the 64-bit mask.
func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX)
    self.Emit("ANDQ" , _CX, _AX)
    self.Emit("XORQ" , _CX, _AX)
    self.Sjmp("JZ" , _LB_error_nan_or_infinite)     // exponent all ones -> NaN / Inf
    self.save_c()
    self.rbuf_di()
    self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0)
    self.call_c(_F_f64toa)
    self.Emit("ADDQ" , _AX, _RL)                    // advance by bytes produced
}
870
// _asm_OP_str encodes a string with plain quoting.
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.encode_string(false)
}

// _asm_OP_bin base64-encodes the byte slice at *SP_p. The high-multiply by
// _IM_mulv implements the constant division used to bound the encoded size
// from the input length, then the buffer is reserved, the quotes written,
// and the native b64encode invoked (AVX2 variant when available).
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)           // AX = input length
    self.Emit("ADDQ", jit.Imm(2), _AX)                  // round up for the division
    self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)           // magic multiplier
    self.Emit("MOVQ", _DX, _R8)                         // DX is RC — save it around MULQ
    self.From("MULQ", _CX)                              // DX = high 64 bits of AX * CX
    self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX)     // AX = DX * 2 + 1
    self.Emit("ORQ" , jit.Imm(2), _AX)
    self.Emit("MOVQ", _R8, _DX)                         // restore RC
    self.check_size_r(_AX, 0)
    self.add_char('"')
    self.save_c()
    self.prep_buffer_c()
    self.Emit("MOVQ", _SP_p, _SI)                       // SI = &slice header

    /* mode flag: AVX2 when the CPU supports it */
    if !cpu.HasAVX2 {
        self.Emit("XORL", _DX, _DX)
    } else {
        self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX)
    }

    self.call_c(_F_b64encode)
    self.load_buffer()                                  // b64encode advanced the buffer length
    self.add_char('"')
}

// _asm_OP_quote encodes a string with double (escaped) quoting.
func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
    self.encode_string(true)
}
906
// _asm_OP_number emits a json.Number: validates the string, then memmoves
// its bytes verbatim into the buffer. Empty numbers encode as "0"; a
// non-empty number with a nil data pointer panics.
func (self *_Assembler) _asm_OP_number(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)  // CX = length
    self.Emit("TESTQ", _CX, _CX)
    self.Sjmp("JZ" , "_empty_{n}")
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // AX = data pointer
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNZ" , "_number_next_{n}")
    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0))
    self.Sjmp("JMP", _LB_panic)
    self.Link("_number_next_{n}")

    /* validate; the string at 0(SP)/8(SP) is reused by error_invalid_number */
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))
    self.call_go(_F_isValidNumber)
    self.Emit("CMPB" , jit.Ptr(_SP, 16), jit.Imm(0))
    self.Sjmp("JE" , _LB_error_invalid_number)

    /* reserve space, advance RL, and memmove the digits in */
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)
    self.check_size_r(_AX, 0)
    self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX)    // AX = destination
    self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVOU", jit.Ptr(_SP_p, 0), _X0)          // (src, len) pair
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))
    self.call_go(_F_memmove)
    self.Sjmp("JMP" , "_done_{n}")
    self.Link("_empty_{n}")
    self.check_size(1)
    self.add_char('0')
    self.Link("_done_{n}")
}

// _asm_OP_eface encodes an empty-interface value by calling
// encodeTypedPointer(buf, type, &data-word, state-stack, flags).
func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
    self.prep_buffer()
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // type word
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX)  // pointer to the data word
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24))
    self.Emit("MOVQ" , _ARG_fv, _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))
    self.call_encoder(_F_encodeTypedPointer)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)
}

// _asm_OP_iface does the same for a non-empty interface: the concrete type
// is loaded from the itab (at offset 8) before the call.
func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
    self.prep_buffer()
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)  // itab
    self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX)    // itab.type
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX)  // pointer to the data word
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24))
    self.Emit("MOVQ" , _ARG_fv, _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))
    self.call_encoder(_F_encodeTypedPointer)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)
}
969
// _asm_OP_byte writes the single literal byte carried by the instruction.
func (self *_Assembler) _asm_OP_byte(p *_Instr) {
    self.check_size(1)
    self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(1), _RL)
}

// _asm_OP_text writes the literal string carried by the instruction.
func (self *_Assembler) _asm_OP_text(p *_Instr) {
    self.check_size(len(p.vs()))
    self.add_text(p.vs())
}

// _asm_OP_deref replaces the value pointer with the pointer it refers to.
func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)
}

// _asm_OP_index advances the value pointer by a constant field offset.
func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)
    self.Emit("ADDQ", _AX, _SP_p)
}

// _asm_OP_load reloads SP_x / SP_p / SP_q from the topmost saved state,
// addressing backwards from the state-stack top at ST+0.
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)
}

// _asm_OP_save pushes the current state onto the state stack.
func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.save_state()
}

// _asm_OP_drop pops one saved state.
func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.drop_state(_StateSize)
}

// _asm_OP_drop_2 pops two saved states and clears the extra slot pair.
func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.drop_state(_StateSize * 2)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56))
}
1009
// _asm_OP_recurse encodes a self-referential type by calling back into
// encodeTypedPointer. For indirect types the value pointer is spilled to
// _VAR_vp and its address is passed instead; pv additionally toggles the
// pointer-value bit in the flags.
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.prep_buffer()
    vt, pv := p.vp()
    self.Emit("MOVQ", jit.Type(vt), _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))

    /* value pointer: direct, or through the spill slot for indirect types */
    if !rt.UnpackType(vt).Indirect() {
        self.Emit("MOVQ", _SP_p, _AX)
    } else {
        self.Emit("MOVQ", _SP_p, _VAR_vp)
        self.Emit("LEAQ", _VAR_vp, _AX)
    }

    /* remaining arguments: state stack and (possibly toggled) flags */
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24))
    self.Emit("MOVQ" , _ARG_fv, _AX)
    if pv {
        self.Emit("BTCQ", jit.Imm(bitPointerValue), _AX)
    }
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))
    self.call_encoder(_F_encodeTypedPointer)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)
}
1038
// _asm_OP_is_nil jumps to instruction p.vi() when the pointer at *SP_p is nil.
func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE" , p.vi())
}

// _asm_OP_is_nil_p1 tests the second word (offset 8) instead.
func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0))
    self.Xjmp("JE" , p.vi())
}

// _asm_OP_is_zero_1 jumps when the 1-byte value at *SP_p is zero.
func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE" , p.vi())
}

// _asm_OP_is_zero_2 jumps when the 2-byte value at *SP_p is zero.
func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
    self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE" , p.vi())
}

// _asm_OP_is_zero_4 jumps when the 4-byte value at *SP_p is zero.
func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
    self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE" , p.vi())
}

// _asm_OP_is_zero_8 jumps when the 8-byte value at *SP_p is zero.
func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE" , p.vi())
}

// _asm_OP_is_zero_map jumps when the map at *SP_p is nil or its first word
// (the element count) is zero.
func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Xjmp("JZ" , p.vi())
    self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0))
    self.Xjmp("JE" , p.vi())
}

// _asm_OP_goto is an unconditional jump to instruction p.vi().
func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}
1080
// _asm_OP_map_iter calls iteratorStart(type, map, flags) and stores the
// resulting iterator pointer in SP_q; a non-nil error aborts.
func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))
    self.Emit("MOVQ" , _ARG_fv, _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16))
    self.call_go(_F_iteratorStart)
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _SP_q)     // SP_q = iterator
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)
}

// _asm_OP_map_stop releases the iterator and clears SP_q.
func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
    self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0))
    self.call_go(_F_iteratorStop)
    self.Emit("XORL", _SP_q, _SP_q)
}

// _asm_OP_map_check_key loads the current key pointer from the iterator;
// a nil key means iteration is done and jumps to p.vi().
func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p)
    self.Emit("TESTQ", _SP_p, _SP_p)
    self.Xjmp("JZ" , p.vi())
}

// _asm_OP_map_write_key: when the SortMapKeys flag is set, the key string
// is encoded here and control jumps to p.vi(); otherwise this op is a
// no-op and the regular key path runs.
func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
    self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv)
    self.Sjmp("JNC", "_unordered_key_{n}")
    self.encode_string(false)
    self.Xjmp("JMP", p.vi())
    self.Link("_unordered_key_{n}")
}

// _asm_OP_map_value_next points SP_p at the current value and advances the
// iterator.
func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p)
    self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0))
    self.call_go(_F_iteratorNext)
}

// _asm_OP_slice_len loads the slice length into SP_x, points SP_p at the
// backing array, and sets the "first element" flag.
func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p)
    self.Emit("ORQ" , jit.Imm(1 << _S_init), _SP_f)
}

// _asm_OP_slice_next exits to p.vi() when SP_x reaches zero; otherwise it
// decrements the count and advances SP_p by the element size — except on
// the first iteration: BTRQ clears _S_init and sets CF from its old value,
// and CMOVQCC only moves when the flag was already clear.
func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
    self.Emit("TESTQ" , _SP_x, _SP_x)
    self.Xjmp("JZ" , p.vi())
    self.Emit("SUBQ" , jit.Imm(1), _SP_x)
    self.Emit("BTRQ" , jit.Imm(_S_init), _SP_f)
    self.Emit("LEAQ" , jit.Ptr(_SP_p, int64(p.vlen())), _AX)
    self.Emit("CMOVQCC", _AX, _SP_p)
}
1136
// _asm_OP_marshal encodes via the type's json.Marshaler implementation.
func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
    self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
}
1140
1141 func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
1142 if p.vk() != reflect.Ptr {
1143 panic("marshal_p: invalid type")
1144 } else {
1145 self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
1146 }
1147 }
1148
// _asm_OP_marshal_text encodes via the type's encoding.TextMarshaler.
func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
    self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
}
1152
1153 func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
1154 if p.vk() != reflect.Ptr {
1155 panic("marshal_text_p: invalid type")
1156 } else {
1157 self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
1158 }
1159 }
1160
// _asm_OP_cond_set sets the _S_cond flag.
func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
    self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f)
}

// _asm_OP_cond_testc clears the _S_cond flag (BTRQ sets CF from its old
// value) and jumps to p.vi() when it was set.
func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
    self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f)
    self.Xjmp("JC" , p.vi())
}
1169
// print_gc is a debugging helper: it emits a call to println with the
// instruction index and the opcodes of the two given instructions.
func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8))
    self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0))
    self.call_go(_F_println)
}
1176