package encoder

import (
    `fmt`
    `reflect`
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/cpu`
    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`

    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/rt`
)
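// Bit indices of the per-frame flags kept in _SP_f: _S_cond is set by
// _OP_cond_set and consumed by _OP_cond_testc, and _S_init marks the first
// iteration of a slice loop.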
const (
    _S_cond = iota
    _S_init
)

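// Stack frame layout, in bytes: an argument area (_FP_args), spill space
// for C calls (_FP_fargs), a register save area used around calls
// (_FP_saves), and local variables (_FP_locals). _FP_size additionally
// covers the saved BP, and _FP_base skips the return address to reach the
// incoming arguments.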
const (
    _FP_args   = 32
    _FP_fargs  = 40
    _FP_saves  = 64
    _FP_locals = 24
)

const (
    _FP_loffs = _FP_fargs + _FP_saves
    _FP_offs  = _FP_loffs + _FP_locals

    _FP_size = _FP_offs + 8
    _FP_base = _FP_size + 8
)

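// IEEE-754 exponent bit masks for single and double precision: a float
// whose exponent bits are all ones is NaN or ±Inf, which JSON cannot
// represent.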
const (
    _FM_exp32 = 0x7f800000
    _FM_exp64 = 0x7ff0000000000000
)

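// Little-endian immediates for common literals: "null", "true", "fals",
// the `"\"` opener used by double-quoted strings, "[]", "{}", and the
// multiplicative-inverse constant used to divide by 3 when sizing base64
// output.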
const (
    _IM_null   = 0x6c6c756e
    _IM_true   = 0x65757274
    _IM_fals   = 0x736c6166
    _IM_open   = 0x00225c22
    _IM_array  = 0x5d5b
    _IM_object = 0x7d7b
    _IM_mulv   = -0x5555555555555555
)

const (
    _LB_more_space        = "_more_space"
    _LB_more_space_return = "_more_space_return_"
)

const (
    _LB_error                 = "_error"
    _LB_error_too_deep        = "_error_too_deep"
    _LB_error_invalid_number  = "_error_invalid_number"
    _LB_error_nan_or_infinite = "_error_nan_or_infinite"
    _LB_panic                 = "_panic"
)

var (
    _AX = jit.Reg("AX")
    _BX = jit.Reg("BX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
    _R9 = jit.Reg("R9")
)

var (
    _X0 = jit.Reg("X0")
    _Y0 = jit.Reg("Y0")
)

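// Buffer registers: _ST holds the encoder state stack, while _RP/_RL/_RC
// carry the output buffer's pointer, length and capacity. _RP must stay in
// DI (rbuf_di asserts this) so the native routines can consume it directly.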
var (
    _ST = jit.Reg("R15")
    _RP = jit.Reg("DI")
    _RL = jit.Reg("SI")
    _RC = jit.Reg("DX")
)

var (
    _LR = jit.Reg("R9")
    _ET = jit.Reg("AX")
    _EP = jit.Reg("BX")
)

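// Traversal state: _SP_p points at the value being encoded, _SP_q is an
// auxiliary pointer (e.g. the map iterator), _SP_x is a counter such as the
// remaining slice length, and _SP_f holds the flag bits indexed by _S_*.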
var (
    _SP_p = jit.Reg("R10")
    _SP_q = jit.Reg("R11")
    _SP_x = jit.Reg("R12")
    _SP_f = jit.Reg("R13")
)

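// Stack slots of the four incoming arguments: the result buffer header
// (rb), the value pointer (vp), the state buffer (sb), and the encoding
// flags (fv).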
var (
    _ARG_rb = jit.Ptr(_SP, _FP_base)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 8)
    _ARG_sb = jit.Ptr(_SP, _FP_base + 16)
    _ARG_fv = jit.Ptr(_SP, _FP_base + 24)
)

var (
    _RET_et = _ET
    _RET_ep = _EP
)

var (
    _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves)
    _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
    _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
)

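// Register sets spilled around each kind of call: C (FFI) calls, base64
// encoding, full Go calls, buffer growth, and recursive encoder calls.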
var (
    _REG_ffi = []obj.Addr{_RP, _RL, _RC, _SP_q}
    _REG_b64 = []obj.Addr{_SP_p, _SP_q}

    _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC}
    _REG_ms  = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR}
    _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL}
)

type _Assembler struct {
    jit.BaseAssembler
    p    _Program
    x    int
    name string
}

func newAssembler(p _Program) *_Assembler {
    return new(_Assembler).Init(p)
}

func (self *_Assembler) Load() _Encoder {
    return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}

func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}

func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.builtins()
}

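// _OpFuncTab maps each opcode to the routine that assembles it; instr()
// panics on any opcode without an entry.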
var _OpFuncTab = [256]func(*_Assembler, *_Instr){
    _OP_null           : (*_Assembler)._asm_OP_null,
    _OP_empty_arr      : (*_Assembler)._asm_OP_empty_arr,
    _OP_empty_obj      : (*_Assembler)._asm_OP_empty_obj,
    _OP_bool           : (*_Assembler)._asm_OP_bool,
    _OP_i8             : (*_Assembler)._asm_OP_i8,
    _OP_i16            : (*_Assembler)._asm_OP_i16,
    _OP_i32            : (*_Assembler)._asm_OP_i32,
    _OP_i64            : (*_Assembler)._asm_OP_i64,
    _OP_u8             : (*_Assembler)._asm_OP_u8,
    _OP_u16            : (*_Assembler)._asm_OP_u16,
    _OP_u32            : (*_Assembler)._asm_OP_u32,
    _OP_u64            : (*_Assembler)._asm_OP_u64,
    _OP_f32            : (*_Assembler)._asm_OP_f32,
    _OP_f64            : (*_Assembler)._asm_OP_f64,
    _OP_str            : (*_Assembler)._asm_OP_str,
    _OP_bin            : (*_Assembler)._asm_OP_bin,
    _OP_quote          : (*_Assembler)._asm_OP_quote,
    _OP_number         : (*_Assembler)._asm_OP_number,
    _OP_eface          : (*_Assembler)._asm_OP_eface,
    _OP_iface          : (*_Assembler)._asm_OP_iface,
    _OP_byte           : (*_Assembler)._asm_OP_byte,
    _OP_text           : (*_Assembler)._asm_OP_text,
    _OP_deref          : (*_Assembler)._asm_OP_deref,
    _OP_index          : (*_Assembler)._asm_OP_index,
    _OP_load           : (*_Assembler)._asm_OP_load,
    _OP_save           : (*_Assembler)._asm_OP_save,
    _OP_drop           : (*_Assembler)._asm_OP_drop,
    _OP_drop_2         : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse        : (*_Assembler)._asm_OP_recurse,
    _OP_is_nil         : (*_Assembler)._asm_OP_is_nil,
    _OP_is_nil_p1      : (*_Assembler)._asm_OP_is_nil_p1,
    _OP_is_zero_1      : (*_Assembler)._asm_OP_is_zero_1,
    _OP_is_zero_2      : (*_Assembler)._asm_OP_is_zero_2,
    _OP_is_zero_4      : (*_Assembler)._asm_OP_is_zero_4,
    _OP_is_zero_8      : (*_Assembler)._asm_OP_is_zero_8,
    _OP_is_zero_map    : (*_Assembler)._asm_OP_is_zero_map,
    _OP_goto           : (*_Assembler)._asm_OP_goto,
    _OP_map_iter       : (*_Assembler)._asm_OP_map_iter,
    _OP_map_stop       : (*_Assembler)._asm_OP_map_stop,
    _OP_map_check_key  : (*_Assembler)._asm_OP_map_check_key,
    _OP_map_write_key  : (*_Assembler)._asm_OP_map_write_key,
    _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next,
    _OP_slice_len      : (*_Assembler)._asm_OP_slice_len,
    _OP_slice_next     : (*_Assembler)._asm_OP_slice_next,
    _OP_marshal        : (*_Assembler)._asm_OP_marshal,
    _OP_marshal_p      : (*_Assembler)._asm_OP_marshal_p,
    _OP_marshal_text   : (*_Assembler)._asm_OP_marshal_text,
    _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p,
    _OP_cond_set       : (*_Assembler)._asm_OP_cond_set,
    _OP_cond_testc     : (*_Assembler)._asm_OP_cond_testc,
}

func (self *_Assembler) instr(v *_Instr) {
    if fn := _OpFuncTab[v.op()]; fn != nil {
        fn(self, v)
    } else {
        panic(fmt.Sprintf("invalid opcode: %d", v.op()))
    }
}

func (self *_Assembler) instrs() {
    for i, v := range self.p {
        self.Mark(i)
        self.instr(&v)
        self.debug_instr(i, &v)
    }
}

func (self *_Assembler) builtins() {
    self.more_space()
    self.error_too_deep()
    self.error_invalid_number()
    self.error_nan_or_infinite()
    self.go_panic()
}

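// The epilogue stores the final length back into the result slice header,
// zeroes the pointer-holding argument slots so stale pointers are not kept
// live across the return, and restores BP; _LB_error joins here with
// _ET/_EP already set.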
func (self *_Assembler) epilogue() {
    self.Mark(len(self.p))
    self.Emit("XORL", _ET, _ET)
    self.Emit("XORL", _EP, _EP)
    self.Link(_LB_error)
    self.Emit("MOVQ", _ARG_rb, _CX)
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))
    self.Emit("MOVQ", jit.Imm(0), _ARG_rb)
    self.Emit("MOVQ", jit.Imm(0), _ARG_vp)
    self.Emit("MOVQ", jit.Imm(0), _ARG_sb)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)
    self.Emit("RET")
}

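// The prologue reserves the frame, spills the four register arguments
// (AX/BX/CX/DI under the Go 1.17 register ABI), loads the buffer header
// into _RP/_RL/_RC, and clears the traversal state.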
func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)
    self.Emit("MOVQ", _AX, _ARG_rb)
    self.Emit("MOVQ", _BX, _ARG_vp)
    self.Emit("MOVQ", _CX, _ARG_sb)
    self.Emit("MOVQ", _DI, _ARG_fv)
    self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)
    self.Emit("MOVQ", _BX, _SP_p)
    self.Emit("MOVQ", _CX, _ST)
    self.Emit("XORL", _SP_x, _SP_x)
    self.Emit("XORL", _SP_f, _SP_f)
    self.Emit("XORL", _SP_q, _SP_q)
}

func (self *_Assembler) xsave(reg ...obj.Addr) {
    for i, v := range reg {
        if i > _FP_saves/8 - 1 {
            panic("too many registers to save")
        } else {
            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i)*8))
        }
    }
}

func (self *_Assembler) xload(reg ...obj.Addr) {
    for i, v := range reg {
        if i > _FP_saves/8 - 1 {
            panic("too many registers to load")
        } else {
            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i)*8), v)
        }
    }
}

func (self *_Assembler) rbuf_di() {
    if _RP.Reg != x86.REG_DI {
        panic("register allocation messed up: RP != DI")
    } else {
        self.Emit("ADDQ", _RL, _RP)
    }
}

func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) {
    self.check_size(nd)
    self.save_c()
    self.rbuf_di()
    self.Emit(ins, jit.Ptr(_SP_p, 0), _SI)
    self.call_c(fn)
    self.Emit("ADDQ", _AX, _RL)
}

func (self *_Assembler) store_str(s string) {
    i := 0
    m := rt.Str2Mem(s)

    for i <= len(m) - 8 {
        self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX)
        self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i)))
        i += 8
    }

    if i <= len(m) - 4 {
        self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))
        i += 4
    }

    if i <= len(m) - 2 {
        self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i)))
        i += 2
    }

    if i < len(m) {
        self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i)))
    }
}

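// check_size ensures the buffer can hold n more bytes: when RL+n exceeds
// the capacity in _RC it branches into the shared growth routine, which
// jumps back to a per-call-site return label.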
func (self *_Assembler) check_size(n int) {
    self.check_size_rl(jit.Ptr(_RL, int64(n)))
}

func (self *_Assembler) check_size_r(r obj.Addr, d int) {
    self.check_size_rl(jit.Sib(_RL, r, 1, int64(d)))
}

func (self *_Assembler) check_size_rl(v obj.Addr) {
    idx := self.x
    key := _LB_more_space_return + strconv.Itoa(idx)

    if _LR.Reg != x86.REG_R9 {
        panic("register allocation messed up: LR != R9")
    }

    self.x++
    self.Emit("LEAQ", v, _AX)
    self.Emit("CMPQ", _AX, _RC)
    self.Sjmp("JBE" , key)
    self.slice_grow_ax(key)
    self.Link(key)
}

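// slice_grow_ax loads the return label into _LR with a RIP-relative LEA
// (raw bytes 4C 8D 0D, since the displacement is patched later via Sref)
// and jumps to the shared more_space routine.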
func (self *_Assembler) slice_grow_ax(ret string) {
    self.Byte(0x4c, 0x8d, 0x0d)
    self.Sref(ret, 4)
    self.Sjmp("JMP" , _LB_more_space)
}

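// The encoder keeps recursion state in a stack of _State frames behind
// _ST: offset 0 holds the current depth in bytes, and each frame saves
// _SP_x, _SP_f, _SP_p and _SP_q.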
const (
    _StateSize  = int64(unsafe.Sizeof(_State{}))
    _StackLimit = _MaxStack * _StateSize
)

func (self *_Assembler) save_state() {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)
    self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R9)
    self.Emit("CMPQ", _R9, jit.Imm(_StackLimit))
    self.Sjmp("JAE" , _LB_error_too_deep)
    self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8))
    self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16))
    self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24))
    self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32))
    self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0))
}

func (self *_Assembler) drop_state(decr int64) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)
    self.Emit("SUBQ" , jit.Imm(decr), _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q)
    self.Emit("PXOR" , _X0, _X0)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24))
}

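// add_char / add_long / add_text append literal bytes at RP+RL and advance
// RL; the prep/save/load buffer helpers keep the in-register buffer header
// in sync with the *[]byte argument around calls that may reallocate it.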
func (self *_Assembler) add_char(ch byte) {
    self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(1), _RL)
}

func (self *_Assembler) add_long(ch uint32, n int64) {
    self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(n), _RL)
}

func (self *_Assembler) add_text(ss string) {
    self.store_str(ss)
    self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL)
}

func (self *_Assembler) prep_buffer_AX() {
    self.Emit("MOVQ", _ARG_rb, _AX)
    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))
}

func (self *_Assembler) save_buffer() {
    self.Emit("MOVQ", _ARG_rb, _CX)
    self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0))
    self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8))
    self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16))
}

func (self *_Assembler) load_buffer_AX() {
    self.Emit("MOVQ", _ARG_rb, _AX)
    self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP)
    self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL)
    self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC)
}

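// Call helpers: each variant spills exactly the registers its callee may
// clobber. call_go saves everything, call_c parks _SP_p in the C
// callee-saved BX, and the more_space / encoder variants preserve the
// traversal state across buffer growth and recursive encoding.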
func (self *_Assembler) call(pc obj.Addr) {
    self.Emit("MOVQ", pc, _LR)
    self.Rjmp("CALL", _LR)
}

func (self *_Assembler) save_c() {
    self.xsave(_REG_ffi...)
}

func (self *_Assembler) call_b64(pc obj.Addr) {
    self.xsave(_REG_b64...)
    self.call(pc)
    self.xload(_REG_b64...)
}

func (self *_Assembler) call_c(pc obj.Addr) {
    self.Emit("XCHGQ", _SP_p, _BX)
    self.call(pc)
    self.xload(_REG_ffi...)
    self.Emit("XCHGQ", _SP_p, _BX)
}

func (self *_Assembler) call_go(pc obj.Addr) {
    self.xsave(_REG_all...)
    self.call(pc)
    self.xload(_REG_all...)
}

func (self *_Assembler) call_more_space(pc obj.Addr) {
    self.xsave(_REG_ms...)
    self.call(pc)
    self.xload(_REG_ms...)
}

func (self *_Assembler) call_encoder(pc obj.Addr) {
    self.xsave(_REG_enc...)
    self.call(pc)
    self.xload(_REG_enc...)
}

func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) {
    switch vt.Kind() {
        case reflect.Interface        : self.call_marshaler_i(fn, it)
        case reflect.Ptr, reflect.Map : self.call_marshaler_v(fn, it, vt, true)

        default                       : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect())
    }
}

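// call_marshaler_i type-asserts the stored interface down to the marshaler
// type `it`; a nil interface or a failed assertion emits "null" instead of
// invoking the marshaler.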
func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JZ"   , "_null_{n}")
    self.Emit("MOVQ" , _AX, _BX)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)
    self.Emit("MOVQ" , jit.Gtype(it), _AX)
    self.call_go(_F_assertI2I)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JZ"   , "_null_{n}")
    self.Emit("MOVQ" , _BX, _CX)
    self.Emit("MOVQ" , _AX, _BX)
    self.prep_buffer_AX()
    self.Emit("MOVQ" , _ARG_fv, _DI)
    self.call_go(fn)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
    self.load_buffer_AX()
    self.Sjmp("JMP"  , "_done_{n}")
    self.Link("_null_{n}")
    self.check_size(4)
    self.Emit("MOVL" , jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ" , jit.Imm(4), _RL)
    self.Link("_done_{n}")
}

func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) {
    self.prep_buffer_AX()
    self.Emit("MOVQ", jit.Itab(it, vt), _BX)

    if !deref {
        self.Emit("MOVQ", _SP_p, _CX)
    } else {
        self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX)
    }

    self.Emit("MOVQ", _ARG_fv, _DI)
    self.call_go(fn)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)
    self.load_buffer_AX()
}

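// more_space is the shared slow path for growing the output: it calls the
// runtime's growslice with the byte element type, reloads the new header
// into _RP/_RL/_RC, and returns through _LR to the requesting call site.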
var (
    _T_byte      = jit.Type(byteType)
    _F_growslice = jit.Func(growslice)
)

func (self *_Assembler) more_space() {
    self.Link(_LB_more_space)
    self.Emit("MOVQ", _RP, _BX)
    self.Emit("MOVQ", _RL, _CX)
    self.Emit("MOVQ", _RC, _DI)
    self.Emit("MOVQ", _AX, _SI)
    self.Emit("MOVQ", _T_byte, _AX)
    self.call_more_space(_F_growslice)
    self.Emit("MOVQ", _AX, _RP)
    self.Emit("MOVQ", _BX, _RL)
    self.Emit("MOVQ", _CX, _RC)
    self.save_buffer()
    self.Rjmp("JMP" , _LR)
}

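// Error exits: each path materializes an error pair in _ET/_EP and then
// joins the common _LB_error exit in the epilogue.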
var (
    _V_ERR_too_deep               = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep))))
    _V_ERR_nan_or_infinite        = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite))))
    _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType)
)

func (self *_Assembler) error_too_deep() {
    self.Link(_LB_error_too_deep)
    self.Emit("MOVQ", _V_ERR_too_deep, _EP)
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)
    self.Sjmp("JMP" , _LB_error)
}

func (self *_Assembler) error_invalid_number() {
    self.Link(_LB_error_invalid_number)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX)
    self.call_go(_F_error_number)
    self.Sjmp("JMP" , _LB_error)
}

func (self *_Assembler) error_nan_or_infinite() {
    self.Link(_LB_error_nan_or_infinite)
    self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP)
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)
    self.Sjmp("JMP" , _LB_error)
}

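// encode_string writes a quoted JSON string with the native quote routine.
// It escapes as much input as fits in the remaining buffer; on a negative
// return (output full) it records how much was consumed, doubles the
// buffer, and resumes the loop. doubleQuote adds a second level of quoting.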
var (
    _F_quote = jit.Imm(int64(native.S_quote))
    _F_panic = jit.Func(goPanic)
)

func (self *_Assembler) go_panic() {
    self.Link(_LB_panic)
    self.Emit("MOVQ", _SP_p, _BX)
    self.call_go(_F_panic)
}

func (self *_Assembler) encode_string(doubleQuote bool) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JZ"   , "_str_empty_{n}")
    self.Emit("CMPQ" , jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JNE"  , "_str_next_{n}")
    self.Emit("MOVQ" , jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
    self.Sjmp("JMP"  , _LB_panic)
    self.Link("_str_next_{n}")

    if !doubleQuote {
        self.check_size_r(_AX, 2)
        self.add_char('"')
    } else {
        self.check_size_r(_AX, 6)
        self.add_long(_IM_open, 3)
    }

    self.Emit("XORL", _AX, _AX)
    self.Emit("MOVQ", _AX, _VAR_sp)
    self.Link("_str_loop_{n}")
    self.save_c()

    self.Emit("MOVQ", _RC, _CX)
    self.Emit("SUBQ", _RL, _CX)
    self.Emit("MOVQ", _CX, _VAR_dn)
    self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX)
    self.Emit("LEAQ", _VAR_dn, _CX)
    self.Emit("MOVQ", _VAR_sp, _AX)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI)
    self.Emit("ADDQ", _AX, _DI)
    self.Emit("SUBQ", _AX, _SI)

    if !doubleQuote {
        self.Emit("XORL", _R8, _R8)
    } else {
        self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)
    }

    self.call_c(_F_quote)
    self.Emit("ADDQ" , _VAR_dn, _RL)

    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS"   , "_str_space_{n}")

    if !doubleQuote {
        self.check_size(1)
        self.add_char('"')
        self.Sjmp("JMP", "_str_end_{n}")
    } else {
        self.check_size(3)
        self.add_text("\\\"\"")
        self.Sjmp("JMP", "_str_end_{n}")
    }

    self.Link("_str_space_{n}")
    self.Emit("NOTQ", _AX)
    self.Emit("ADDQ", _AX, _VAR_sp)
    self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX)
    self.slice_grow_ax("_str_loop_{n}")

    if !doubleQuote {
        self.Link("_str_empty_{n}")
        self.check_size(2)
        self.add_text("\"\"")
        self.Link("_str_end_{n}")
    } else {
        self.Link("_str_empty_{n}")
        self.check_size(6)
        self.add_text("\"\\\"\\\"\"")
        self.Link("_str_end_{n}")
    }
}

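// Function addresses used by the opcode routines. The encoder entry points
// are bound in init() rather than at declaration, presumably to avoid an
// initialization cycle with the functions that reference this assembler.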
var (
    _T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
    _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
)

var (
    _F_f64toa    = jit.Imm(int64(native.S_f64toa))
    _F_f32toa    = jit.Imm(int64(native.S_f32toa))
    _F_i64toa    = jit.Imm(int64(native.S_i64toa))
    _F_u64toa    = jit.Imm(int64(native.S_u64toa))
    _F_b64encode = jit.Imm(int64(_subr__b64encode))
)

var (
    _F_memmove       = jit.Func(memmove)
    _F_error_number  = jit.Func(error_number)
    _F_isValidNumber = jit.Func(isValidNumber)
)

var (
    _F_iteratorStop  = jit.Func(iteratorStop)
    _F_iteratorNext  = jit.Func(iteratorNext)
    _F_iteratorStart = jit.Func(iteratorStart)
)

var (
    _F_encodeTypedPointer  obj.Addr
    _F_encodeJsonMarshaler obj.Addr
    _F_encodeTextMarshaler obj.Addr
)

const (
    _MODE_AVX2 = 1 << 2
)

func init() {
    _F_encodeTypedPointer  = jit.Func(encodeTypedPointer)
    _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
    _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
}

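// Per-opcode assembler routines: each _asm_OP_* emits the machine code for
// one abstract instruction. The "{n}" in local label names is expanded per
// instruction so labels stay unique across the program.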
func (self *_Assembler) _asm_OP_null(_ *_Instr) {
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(4), _RL)
}

func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_arr_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_arr_end_{n}")
    self.Link("_empty_arr_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_arr_end_{n}")
}

func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
    self.Sjmp("JC", "_empty_obj_{n}")
    self._asm_OP_null(nil)
    self.Sjmp("JMP", "_empty_obj_end_{n}")
    self.Link("_empty_obj_{n}")
    self.check_size(2)
    self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(2), _RL)
    self.Link("_empty_obj_end_{n}")
}

func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Sjmp("JE"  , "_false_{n}")
    self.check_size(4)
    self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(4), _RL)
    self.Sjmp("JMP" , "_end_{n}")
    self.Link("_false_{n}")
    self.check_size(5)
    self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))
    self.Emit("ADDQ", jit.Imm(5), _RL)
    self.Link("_end_{n}")
}

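// The sizes passed to store_int are upper bounds on the formatted length
// of each integer type; the native itoa routine writes at RP+RL and
// returns the byte count in AX.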
func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
    self.store_int(4, _F_i64toa, "MOVBQSX")
}

func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
    self.store_int(6, _F_i64toa, "MOVWQSX")
}

func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
    self.store_int(17, _F_i64toa, "MOVLQSX")
}

func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
    self.store_int(21, _F_i64toa, "MOVQ")
}

func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
    self.store_int(3, _F_u64toa, "MOVBQZX")
}

func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
    self.store_int(5, _F_u64toa, "MOVWQZX")
}

func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
    self.store_int(16, _F_u64toa, "MOVLQZX")
}

func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
    self.store_int(20, _F_u64toa, "MOVQ")
}

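// Floats are checked for NaN/±Inf up front: ANDing with the exponent mask
// and then XORing the mask back yields zero exactly when all exponent bits
// were set, in which case encoding fails with an UnsupportedValueError.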
func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX)
    self.Emit("XORL" , jit.Imm(_FM_exp32), _AX)
    self.Sjmp("JZ"   , _LB_error_nan_or_infinite)
    self.save_c()
    self.rbuf_di()
    self.Emit("MOVSS", jit.Ptr(_SP_p, 0), _X0)
    self.call_c(_F_f32toa)
    self.Emit("ADDQ" , _AX, _RL)
}

func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
    self.check_size(32)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX)
    self.Emit("ANDQ" , _CX, _AX)
    self.Emit("XORQ" , _CX, _AX)
    self.Sjmp("JZ"   , _LB_error_nan_or_infinite)
    self.save_c()
    self.rbuf_di()
    self.Emit("MOVSD", jit.Ptr(_SP_p, 0), _X0)
    self.call_c(_F_f64toa)
    self.Emit("ADDQ" , _AX, _RL)
}

func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.encode_string(false)
}

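// _asm_OP_bin reserves space for base64 output by computing a safe upper
// bound on 4*ceil(len/3) via a multiplicative inverse of 3 (_IM_mulv),
// then calls the native b64encode, selecting the AVX2 variant when the
// CPU supports it.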
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)
    self.Emit("ADDQ", jit.Imm(2), _AX)
    self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)
    self.Emit("MOVQ", _DX, _BX)
    self.From("MULQ", _CX)
    self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX)
    self.Emit("ORQ" , jit.Imm(2), _AX)
    self.Emit("MOVQ", _BX, _DX)
    self.check_size_r(_AX, 0)
    self.add_char('"')
    self.Emit("MOVQ", _ARG_rb, _DI)
    self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8))
    self.Emit("MOVQ", _SP_p, _SI)

    if !cpu.HasAVX2 {
        self.Emit("XORL", _DX, _DX)
    } else {
        self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX)
    }

    self.call_b64(_F_b64encode)
    self.load_buffer_AX()
    self.add_char('"')
}

func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
    self.encode_string(true)
}

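// json.Number values are validated with isValidNumber and then copied
// verbatim via memmove; an empty Number encodes as "0", and a nil pointer
// with non-zero length panics.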
func (self *_Assembler) _asm_OP_number(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)
    self.Emit("TESTQ", _BX, _BX)
    self.Sjmp("JZ"   , "_empty_{n}")
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNZ"  , "_number_next_{n}")
    self.Emit("MOVQ" , jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
    self.Sjmp("JMP"  , _LB_panic)
    self.Link("_number_next_{n}")
    self.call_go(_F_isValidNumber)
    self.Emit("CMPB" , _AX, jit.Imm(0))
    self.Sjmp("JE"   , _LB_error_invalid_number)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)
    self.check_size_r(_BX, 0)
    self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX)
    self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX)
    self.call_go(_F_memmove)
    self.Emit("MOVQ" , _ARG_rb, _AX)
    self.Emit("MOVQ" , _RL, jit.Ptr(_AX, 8))
    self.Sjmp("JMP"  , "_done_{n}")
    self.Link("_empty_{n}")
    self.check_size(1)
    self.add_char('0')
    self.Link("_done_{n}")
}

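// Interface values recurse through encodeTypedPointer: _OP_eface reads the
// dynamic type directly from the empty interface, while _OP_iface loads it
// from the itab's type field.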
func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
    self.prep_buffer_AX()
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)
    self.Emit("MOVQ" , _ST, _DI)
    self.Emit("MOVQ" , _ARG_fv, _SI)
    self.call_encoder(_F_encodeTypedPointer)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
    self.load_buffer_AX()
}

func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
    self.prep_buffer_AX()
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX)
    self.Emit("MOVQ" , jit.Ptr(_CX, 8), _BX)
    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)
    self.Emit("MOVQ" , _ST, _DI)
    self.Emit("MOVQ" , _ARG_fv, _SI)
    self.call_encoder(_F_encodeTypedPointer)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
    self.load_buffer_AX()
}

func (self *_Assembler) _asm_OP_byte(p *_Instr) {
    self.check_size(1)
    self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0))
    self.Emit("ADDQ", jit.Imm(1), _RL)
}

func (self *_Assembler) _asm_OP_text(p *_Instr) {
    self.check_size(len(p.vs()))
    self.add_text(p.vs())
}

func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)
}

func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)
    self.Emit("ADDQ", _AX, _SP_p)
}

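// _OP_load restores the traversal registers from the topmost _State frame
// (offsets are relative to the current depth held at _ST+0); _OP_save and
// the _OP_drop variants push and pop frames via save_state/drop_state.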
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)
}

func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.save_state()
}

func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.drop_state(_StateSize)
}

func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.drop_state(_StateSize * 2)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56))
}

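// _OP_recurse re-enters the encoder through encodeTypedPointer: direct
// types pass _SP_p itself, indirect ones pass the address of a stack slot
// holding it, and pv toggles the pointer-value bit in the flags.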
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.prep_buffer_AX()
    vt, pv := p.vp()
    self.Emit("MOVQ", jit.Type(vt), _BX)

    if !rt.UnpackType(vt).Indirect() {
        self.Emit("MOVQ", _SP_p, _CX)
    } else {
        self.Emit("MOVQ", _SP_p, _VAR_vp)
        self.Emit("LEAQ", _VAR_vp, _CX)
    }

    self.Emit("MOVQ" , _ST, _DI)
    self.Emit("MOVQ" , _ARG_fv, _SI)
    if pv {
        self.Emit("BTCQ", jit.Imm(bitPointerValue), _SI)
    }
    self.call_encoder(_F_encodeTypedPointer)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
    self.load_buffer_AX()
}

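// The is_nil / is_zero family compares the value against zero at the
// appropriate width and branches to the instruction target when it is
// zero; the map variant also treats a map whose count is zero as empty.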
func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
    self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
    self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0))
    self.Xjmp("JE"  , p.vi())
}

func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Xjmp("JZ"   , p.vi())
    self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0))
    self.Xjmp("JE"   , p.vi())
}

func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}

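// Map iteration: _OP_map_iter starts a runtime iterator (an error pair may
// come back in BX/CX), _OP_map_check_key leaves the loop once the key
// pointer is nil, _OP_map_write_key encodes the key inline only when
// bitSortMapKeys is set, and _OP_map_value_next steps to the value and
// advances the iterator.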
func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)
    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)
    self.Emit("MOVQ" , _ARG_fv, _CX)
    self.call_go(_F_iteratorStart)
    self.Emit("MOVQ" , _AX, _SP_q)
    self.Emit("MOVQ" , _BX, _ET)
    self.Emit("MOVQ" , _CX, _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ"  , _LB_error)
}

func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
    self.Emit("MOVQ", _SP_q, _AX)
    self.call_go(_F_iteratorStop)
    self.Emit("XORL", _SP_q, _SP_q)
}

func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p)
    self.Emit("TESTQ", _SP_p, _SP_p)
    self.Xjmp("JZ"   , p.vi())
}

func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
    self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv)
    self.Sjmp("JNC", "_unordered_key_{n}")
    self.encode_string(false)
    self.Xjmp("JMP", p.vi())
    self.Link("_unordered_key_{n}")
}

func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p)
    self.Emit("MOVQ", _SP_q, _AX)
    self.call_go(_F_iteratorNext)
}

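// Slice iteration: _OP_slice_len captures the length in _SP_x and sets
// _S_init; _OP_slice_next exits when the count reaches zero and advances
// _SP_p by the element size on every pass except the first (BTRQ both
// tests and clears _S_init, and CMOVQCC skips the advance when it was set).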
func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SP_x)
    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)
    self.Emit("ORQ" , jit.Imm(1 << _S_init), _SP_f)
}

func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
    self.Emit("TESTQ"  , _SP_x, _SP_x)
    self.Xjmp("JZ"     , p.vi())
    self.Emit("SUBQ"   , jit.Imm(1), _SP_x)
    self.Emit("BTRQ"   , jit.Imm(_S_init), _SP_f)
    self.Emit("LEAQ"   , jit.Ptr(_SP_p, int64(p.vlen())), _AX)
    self.Emit("CMOVQCC", _AX, _SP_p)
}

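// Marshaler dispatch: the plain variants go through call_marshaler, which
// picks a calling shape by kind, while the _p variants require a pointer
// kind and pass _SP_p unchanged rather than loading through it.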
func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
    self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
}

func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
    }
}

func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
    self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
}

func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
    if p.vk() != reflect.Ptr {
        panic("marshal_text_p: invalid type")
    } else {
        self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
    }
}

func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
    self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f)
}

func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
    self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f)
    self.Xjmp("JC"  , p.vi())
}

func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX)
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX)
    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
    self.call_go(_F_println)
}