19 package decoder
20
21 import (
22 `encoding/json`
23 `fmt`
24 `math`
25 `reflect`
26 `unsafe`
27
28 `github.com/bytedance/sonic/internal/caching`
29 `github.com/bytedance/sonic/internal/jit`
30 `github.com/bytedance/sonic/internal/native`
31 `github.com/bytedance/sonic/internal/native/types`
32 `github.com/bytedance/sonic/internal/rt`
33 `github.com/twitchyliquid64/golang-asm/obj`
34 )
35
36
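// Stack frame layout used by the generated decoders (offsets from SP after
// the prologue): the first _FP_fargs bytes spill argument registers around
// calls, the next _FP_saves bytes back the save()/load() helpers, and the
// following _FP_locals bytes hold the _VAR_* slots. _FP_offs is where the
// caller's BP is stored, _FP_size is the whole frame, and _FP_base addresses
// the incoming arguments just above the return address.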
67 const (
68 _FP_args = 72
69 _FP_fargs = 80
70 _FP_saves = 48
71 _FP_locals = 144
72 )
73
74 const (
75 _FP_offs = _FP_fargs + _FP_saves + _FP_locals
76 _FP_size = _FP_offs + 8
77 _FP_base = _FP_size + 8
78 )
79
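// _IM_null / _IM_true / _IM_alse are the little-endian 32-bit images of the
// byte sequences "null", "true" and "alse" (the tail of "false"), so a whole
// literal can be matched with a single 4-byte compare. _BM_space below is a
// bit-set of the four JSON whitespace characters, tested with a BTQ.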
80 const (
81 _IM_null = 0x6c6c756e
82 _IM_true = 0x65757274
83 _IM_alse = 0x65736c61
84 )
85
86 const (
87 _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
88 )
89
90 const (
91 _MODE_JSON = 1 << 3
92 )
93
94 const (
95 _LB_error = "_error"
96 _LB_im_error = "_im_error"
97 _LB_eof_error = "_eof_error"
98 _LB_type_error = "_type_error"
99 _LB_field_error = "_field_error"
100 _LB_range_error = "_range_error"
101 _LB_stack_error = "_stack_error"
102 _LB_base64_error = "_base64_error"
103 _LB_unquote_error = "_unquote_error"
104 _LB_parsing_error = "_parsing_error"
105 _LB_parsing_error_v = "_parsing_error_v"
106 _LB_mismatch_error = "_mismatch_error"
107 )
108
109 const (
110 _LB_char_0_error = "_char_0_error"
111 _LB_char_1_error = "_char_1_error"
112 _LB_char_2_error = "_char_2_error"
113 _LB_char_3_error = "_char_3_error"
114 _LB_char_4_error = "_char_4_error"
115 _LB_char_m2_error = "_char_m2_error"
116 _LB_char_m3_error = "_char_m3_error"
117 )
118
119 const (
120 _LB_skip_one = "_skip_one"
121 _LB_skip_key_value = "_skip_key_value"
122 )
123
124 var (
125 _AX = jit.Reg("AX")
126 _BX = jit.Reg("BX")
127 _CX = jit.Reg("CX")
128 _DX = jit.Reg("DX")
129 _DI = jit.Reg("DI")
130 _SI = jit.Reg("SI")
131 _BP = jit.Reg("BP")
132 _SP = jit.Reg("SP")
133 _R8 = jit.Reg("R8")
134 _R9 = jit.Reg("R9")
135 _X0 = jit.Reg("X0")
136 _X1 = jit.Reg("X1")
137 )
138
139 var (
140 _IP = jit.Reg("R10")
141 _IC = jit.Reg("R11")
142 _IL = jit.Reg("R12")
143 _ST = jit.Reg("R13")
144 _VP = jit.Reg("R15")
145 )
146
147 var (
148 _DF = jit.Reg("AX")
149 _ET = jit.Reg("AX")
150 _EP = jit.Reg("BX")
151 )
152
153
154
155 var (
156 _ARG_s = _ARG_sp
157 _ARG_sp = jit.Ptr(_SP, _FP_base + 0)
158 _ARG_sl = jit.Ptr(_SP, _FP_base + 8)
159 _ARG_ic = jit.Ptr(_SP, _FP_base + 16)
160 _ARG_vp = jit.Ptr(_SP, _FP_base + 24)
161 _ARG_sb = jit.Ptr(_SP, _FP_base + 32)
162 _ARG_fv = jit.Ptr(_SP, _FP_base + 40)
163 )
164
165 var (
166 _ARG_sv = _ARG_sv_p
167 _ARG_sv_p = jit.Ptr(_SP, _FP_base + 48)
168 _ARG_sv_n = jit.Ptr(_SP, _FP_base + 56)
169 _ARG_vk = jit.Ptr(_SP, _FP_base + 64)
170 )
171
172 var (
173 _VAR_st = _VAR_st_Vt
174 _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves)
175 )
176
177 var (
178 _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0)
179 _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
180 _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
181 _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24)
182 _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32)
183 _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40)
184 )
185
186 var (
187 _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48)
188 _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56)
189 _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64)
190 _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72)
191 _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80)
192 )
193
194 var (
195 _VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88)
196 _VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96)
197 _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104)
198 )
199
200 var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112)
201
202 var (
203 _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120)
204 _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128)
205 _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136)
206 )
207
208 type _Assembler struct {
209 jit.BaseAssembler
210 p _Program
211 name string
212 }
213
214 func newAssembler(p _Program) *_Assembler {
215 return new(_Assembler).Init(p)
216 }
217
218
219
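// Load assembles the compiled program into executable code and wraps it as a
// _Decoder; argPtrs and localPtrs describe the argument and local slots that
// may hold live pointers so the frame can be scanned safely.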
220 func (self *_Assembler) Load() _Decoder {
221 return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
222 }
223
224 func (self *_Assembler) Init(p _Program) *_Assembler {
225 self.p = p
226 self.BaseAssembler.Init(self.compile)
227 return self
228 }
229
230 func (self *_Assembler) compile() {
231 self.prologue()
232 self.instrs()
233 self.epilogue()
234 self.copy_string()
235 self.escape_string()
236 self.escape_string_twice()
237 self.skip_one()
238 self.skip_key_value()
239 self.type_error()
240 self.mismatch_error()
241 self.field_error()
242 self.range_error()
243 self.stack_error()
244 self.base64_error()
245 self.parsing_error()
246 }
247
248
249
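// _OpFuncTab maps every opcode of the intermediate _Program to the method
// that emits its machine code; instr() dispatches through it, and instrs()
// marks each program counter so jump targets can be resolved later.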
250 var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
251 _OP_any : (*_Assembler)._asm_OP_any,
252 _OP_dyn : (*_Assembler)._asm_OP_dyn,
253 _OP_str : (*_Assembler)._asm_OP_str,
254 _OP_bin : (*_Assembler)._asm_OP_bin,
255 _OP_bool : (*_Assembler)._asm_OP_bool,
256 _OP_num : (*_Assembler)._asm_OP_num,
257 _OP_i8 : (*_Assembler)._asm_OP_i8,
258 _OP_i16 : (*_Assembler)._asm_OP_i16,
259 _OP_i32 : (*_Assembler)._asm_OP_i32,
260 _OP_i64 : (*_Assembler)._asm_OP_i64,
261 _OP_u8 : (*_Assembler)._asm_OP_u8,
262 _OP_u16 : (*_Assembler)._asm_OP_u16,
263 _OP_u32 : (*_Assembler)._asm_OP_u32,
264 _OP_u64 : (*_Assembler)._asm_OP_u64,
265 _OP_f32 : (*_Assembler)._asm_OP_f32,
266 _OP_f64 : (*_Assembler)._asm_OP_f64,
267 _OP_unquote : (*_Assembler)._asm_OP_unquote,
268 _OP_nil_1 : (*_Assembler)._asm_OP_nil_1,
269 _OP_nil_2 : (*_Assembler)._asm_OP_nil_2,
270 _OP_nil_3 : (*_Assembler)._asm_OP_nil_3,
271 _OP_deref : (*_Assembler)._asm_OP_deref,
272 _OP_index : (*_Assembler)._asm_OP_index,
273 _OP_is_null : (*_Assembler)._asm_OP_is_null,
274 _OP_is_null_quote : (*_Assembler)._asm_OP_is_null_quote,
275 _OP_map_init : (*_Assembler)._asm_OP_map_init,
276 _OP_map_key_i8 : (*_Assembler)._asm_OP_map_key_i8,
277 _OP_map_key_i16 : (*_Assembler)._asm_OP_map_key_i16,
278 _OP_map_key_i32 : (*_Assembler)._asm_OP_map_key_i32,
279 _OP_map_key_i64 : (*_Assembler)._asm_OP_map_key_i64,
280 _OP_map_key_u8 : (*_Assembler)._asm_OP_map_key_u8,
281 _OP_map_key_u16 : (*_Assembler)._asm_OP_map_key_u16,
282 _OP_map_key_u32 : (*_Assembler)._asm_OP_map_key_u32,
283 _OP_map_key_u64 : (*_Assembler)._asm_OP_map_key_u64,
284 _OP_map_key_f32 : (*_Assembler)._asm_OP_map_key_f32,
285 _OP_map_key_f64 : (*_Assembler)._asm_OP_map_key_f64,
286 _OP_map_key_str : (*_Assembler)._asm_OP_map_key_str,
287 _OP_map_key_utext : (*_Assembler)._asm_OP_map_key_utext,
288 _OP_map_key_utext_p : (*_Assembler)._asm_OP_map_key_utext_p,
289 _OP_array_skip : (*_Assembler)._asm_OP_array_skip,
290 _OP_array_clear : (*_Assembler)._asm_OP_array_clear,
291 _OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p,
292 _OP_slice_init : (*_Assembler)._asm_OP_slice_init,
293 _OP_slice_append : (*_Assembler)._asm_OP_slice_append,
294 _OP_object_skip : (*_Assembler)._asm_OP_object_skip,
295 _OP_object_next : (*_Assembler)._asm_OP_object_next,
296 _OP_struct_field : (*_Assembler)._asm_OP_struct_field,
297 _OP_unmarshal : (*_Assembler)._asm_OP_unmarshal,
298 _OP_unmarshal_p : (*_Assembler)._asm_OP_unmarshal_p,
299 _OP_unmarshal_text : (*_Assembler)._asm_OP_unmarshal_text,
300 _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,
301 _OP_lspace : (*_Assembler)._asm_OP_lspace,
302 _OP_match_char : (*_Assembler)._asm_OP_match_char,
303 _OP_check_char : (*_Assembler)._asm_OP_check_char,
304 _OP_load : (*_Assembler)._asm_OP_load,
305 _OP_save : (*_Assembler)._asm_OP_save,
306 _OP_drop : (*_Assembler)._asm_OP_drop,
307 _OP_drop_2 : (*_Assembler)._asm_OP_drop_2,
308 _OP_recurse : (*_Assembler)._asm_OP_recurse,
309 _OP_goto : (*_Assembler)._asm_OP_goto,
310 _OP_switch : (*_Assembler)._asm_OP_switch,
311 _OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0,
312 _OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err,
313 _OP_go_skip : (*_Assembler)._asm_OP_go_skip,
314 _OP_add : (*_Assembler)._asm_OP_add,
315 _OP_check_empty : (*_Assembler)._asm_OP_check_empty,
316 _OP_debug : (*_Assembler)._asm_OP_debug,
317 }
318
319 func (self *_Assembler) _asm_OP_debug(_ *_Instr) {
320 self.Byte(0xcc)
321 }
322
323 func (self *_Assembler) instr(v *_Instr) {
324 if fn := _OpFuncTab[v.op()]; fn != nil {
325 fn(self, v)
326 } else {
327 panic(fmt.Sprintf("invalid opcode: %d", v.op()))
328 }
329 }
330
331 func (self *_Assembler) instrs() {
332 for i, v := range self.p {
333 self.Mark(i)
334 self.instr(&v)
335 self.debug_instr(i, &v)
336 }
337 }
338
339 func (self *_Assembler) epilogue() {
340 self.Mark(len(self.p))
341 self.Emit("XORL", _EP, _EP)
342 self.Emit("MOVQ", _VAR_et, _ET)
343 self.Emit("TESTQ", _ET, _ET)
344 self.Sjmp("JNZ", _LB_mismatch_error)
345 self.Link(_LB_error)
346 self.Emit("MOVQ", _EP, _CX)
347 self.Emit("MOVQ", _ET, _BX)
348 self.Emit("MOVQ", _IC, _AX)
349 self.Emit("MOVQ", jit.Imm(0), _ARG_sp)
350 self.Emit("MOVQ", jit.Imm(0), _ARG_vp)
351 self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p)
352 self.Emit("MOVQ", jit.Imm(0), _ARG_vk)
353 self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)
354 self.Emit("ADDQ", jit.Imm(_FP_size), _SP)
355 self.Emit("RET")
356 }
357
358 func (self *_Assembler) prologue() {
359 self.Emit("SUBQ", jit.Imm(_FP_size), _SP)
360 self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))
361 self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)
362 self.Emit("MOVQ", _AX, _ARG_sp)
363 self.Emit("MOVQ", _AX, _IP)
364 self.Emit("MOVQ", _BX, _ARG_sl)
365 self.Emit("MOVQ", _BX, _IL)
366 self.Emit("MOVQ", _CX, _ARG_ic)
367 self.Emit("MOVQ", _CX, _IC)
368 self.Emit("MOVQ", _DI, _ARG_vp)
369 self.Emit("MOVQ", _DI, _VP)
370 self.Emit("MOVQ", _SI, _ARG_sb)
371 self.Emit("MOVQ", _SI, _ST)
372 self.Emit("MOVQ", _R8, _ARG_fv)
373 self.Emit("MOVQ", jit.Imm(0), _ARG_sv_p)
374 self.Emit("MOVQ", jit.Imm(0), _ARG_sv_n)
375 self.Emit("MOVQ", jit.Imm(0), _ARG_vk)
376 self.Emit("MOVQ", jit.Imm(0), _VAR_et)
377
378 self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc)
379 self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)
380 self.Emit("MOVQ", _AX, _VAR_st_Db)
381 }
382
383
384
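// Calling conventions used below: save()/load() spill the pinned registers
// into the _FP_saves area. call_go wraps calls into Go code, which may
// clobber them under the register ABI; callc preserves only _IP around
// native subroutines, and call_c additionally parks the cursor in BX (which
// is callee-saved in the C ABI) so it survives the call. call_sf and call_vf
// load the (string, *cursor, state) arguments expected by the native skip
// and value-parsing routines.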
385 var (
386 _REG_go = []obj.Addr { _ST, _VP, _IP, _IL, _IC }
387 _REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC, _IL }
388 )
389
390 func (self *_Assembler) save(r ...obj.Addr) {
391 for i, v := range r {
392 if i > _FP_saves / 8 - 1 {
393 panic("too many registers to save")
394 } else {
395 self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
396 }
397 }
398 }
399
400 func (self *_Assembler) load(r ...obj.Addr) {
401 for i, v := range r {
402 if i > _FP_saves / 8 - 1 {
403 panic("too many registers to load")
404 } else {
405 self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
406 }
407 }
408 }
409
410 func (self *_Assembler) call(fn obj.Addr) {
411 self.Emit("MOVQ", fn, _R9)
412 self.Rjmp("CALL", _R9)
413 }
414
415 func (self *_Assembler) call_go(fn obj.Addr) {
416 self.save(_REG_go...)
417 self.call(fn)
418 self.load(_REG_go...)
419 }
420
421 func (self *_Assembler) callc(fn obj.Addr) {
422 self.save(_IP)
423 self.call(fn)
424 self.load(_IP)
425 }
426
427 func (self *_Assembler) call_c(fn obj.Addr) {
428 self.Emit("XCHGQ", _IC, _BX)
429 self.callc(fn)
430 self.Emit("XCHGQ", _IC, _BX)
431 }
432
433 func (self *_Assembler) call_sf(fn obj.Addr) {
434 self.Emit("LEAQ", _ARG_s, _DI)
435 self.Emit("MOVQ", _IC, _ARG_ic)
436 self.Emit("LEAQ", _ARG_ic, _SI)
437 self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX)
438 self.Emit("MOVQ", _ARG_fv, _CX)
439 self.callc(fn)
440 self.Emit("MOVQ", _ARG_ic, _IC)
441 }
442
443 func (self *_Assembler) call_vf(fn obj.Addr) {
444 self.Emit("LEAQ", _ARG_s, _DI)
445 self.Emit("MOVQ", _IC, _ARG_ic)
446 self.Emit("LEAQ", _ARG_ic, _SI)
447 self.Emit("LEAQ", _VAR_st, _DX)
448 self.callc(fn)
449 self.Emit("MOVQ", _ARG_ic, _IC)
450 }
451
452
453
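// Error construction: each _LB_*_error label below builds the error value in
// _ET/_EP (calling the matching error_* helper where needed) and then jumps
// to _LB_error, which is linked inside the epilogue and returns the cursor
// position together with the error.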
454 var (
455 _F_convT64 = jit.Func(convT64)
456 _F_error_wrap = jit.Func(error_wrap)
457 _F_error_type = jit.Func(error_type)
458 _F_error_field = jit.Func(error_field)
459 _F_error_value = jit.Func(error_value)
460 _F_error_mismatch = jit.Func(error_mismatch)
461 )
462
463 var (
464 _I_int8 , _T_int8 = rtype(reflect.TypeOf(int8(0)))
465 _I_int16 , _T_int16 = rtype(reflect.TypeOf(int16(0)))
466 _I_int32 , _T_int32 = rtype(reflect.TypeOf(int32(0)))
467 _I_uint8 , _T_uint8 = rtype(reflect.TypeOf(uint8(0)))
468 _I_uint16 , _T_uint16 = rtype(reflect.TypeOf(uint16(0)))
469 _I_uint32 , _T_uint32 = rtype(reflect.TypeOf(uint32(0)))
470 _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0)))
471 )
472
473 var (
474 _T_error = rt.UnpackType(errorType)
475 _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
476 )
477
478 var (
479 _V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
480 _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
481 _I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
482 )
483
484 func (self *_Assembler) type_error() {
485 self.Link(_LB_type_error)
486 self.call_go(_F_error_type)
487 self.Sjmp("JMP" , _LB_error)
488 }
489
490 func (self *_Assembler) mismatch_error() {
491 self.Link(_LB_mismatch_error)
492 self.Emit("MOVQ", _VAR_et, _ET)
493 self.Emit("MOVQ", _VAR_ic, _EP)
494 self.Emit("MOVQ", _I_json_MismatchTypeError, _CX)
495 self.Emit("CMPQ", _ET, _CX)
496 self.Sjmp("JE" , _LB_error)
497 self.Emit("MOVQ", _ARG_sp, _AX)
498 self.Emit("MOVQ", _ARG_sl, _BX)
499 self.Emit("MOVQ", _VAR_ic, _CX)
500 self.Emit("MOVQ", _VAR_et, _DI)
501 self.call_go(_F_error_mismatch)
502 self.Sjmp("JMP" , _LB_error)
503 }
504
505 func (self *_Assembler) field_error() {
506 self.Link(_LB_field_error)
507 self.Emit("MOVQ", _ARG_sv_p, _AX)
508 self.Emit("MOVQ", _ARG_sv_n, _BX)
509 self.call_go(_F_error_field)
510 self.Sjmp("JMP" , _LB_error)
511 }
512
513 func (self *_Assembler) range_error() {
514 self.Link(_LB_range_error)
515 self.Emit("MOVQ", _ET, _CX)
516 self.slice_from(_VAR_st_Ep, 0)
517 self.Emit("MOVQ", _DI, _AX)
518 self.Emit("MOVQ", _EP, _DI)
519 self.Emit("MOVQ", _SI, _BX)
520 self.call_go(_F_error_value)
521 self.Sjmp("JMP" , _LB_error)
522 }
523
524 func (self *_Assembler) stack_error() {
525 self.Link(_LB_stack_error)
526 self.Emit("MOVQ", _V_stackOverflow, _EP)
527 self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)
528 self.Sjmp("JMP" , _LB_error)
529 }
530
531 func (self *_Assembler) base64_error() {
532 self.Link(_LB_base64_error)
533 self.Emit("NEGQ", _AX)
534 self.Emit("SUBQ", jit.Imm(1), _AX)
535 self.call_go(_F_convT64)
536 self.Emit("MOVQ", _AX, _EP)
537 self.Emit("MOVQ", _I_base64_CorruptInputError, _ET)
538 self.Sjmp("JMP" , _LB_error)
539 }
540
541 func (self *_Assembler) parsing_error() {
542 self.Link(_LB_eof_error)
543 self.Emit("MOVQ" , _IL, _IC)
544 self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP)
545 self.Sjmp("JMP" , _LB_parsing_error)
546 self.Link(_LB_unquote_error)
547 self.Emit("SUBQ" , _VAR_sr, _SI)
548 self.Emit("SUBQ" , _SI, _IC)
549 self.Link(_LB_parsing_error_v)
550 self.Emit("MOVQ" , _AX, _EP)
551 self.Emit("NEGQ" , _EP)
552 self.Sjmp("JMP" , _LB_parsing_error)
553 self.Link(_LB_char_m3_error)
554 self.Emit("SUBQ" , jit.Imm(1), _IC)
555 self.Link(_LB_char_m2_error)
556 self.Emit("SUBQ" , jit.Imm(2), _IC)
557 self.Sjmp("JMP" , _LB_char_0_error)
558 self.Link(_LB_im_error)
559 self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0))
560 self.Sjmp("JNE" , _LB_char_0_error)
561 self.Emit("SHRL" , jit.Imm(8), _CX)
562 self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1))
563 self.Sjmp("JNE" , _LB_char_1_error)
564 self.Emit("SHRL" , jit.Imm(8), _CX)
565 self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2))
566 self.Sjmp("JNE" , _LB_char_2_error)
567 self.Sjmp("JMP" , _LB_char_3_error)
568 self.Link(_LB_char_4_error)
569 self.Emit("ADDQ" , jit.Imm(1), _IC)
570 self.Link(_LB_char_3_error)
571 self.Emit("ADDQ" , jit.Imm(1), _IC)
572 self.Link(_LB_char_2_error)
573 self.Emit("ADDQ" , jit.Imm(1), _IC)
574 self.Link(_LB_char_1_error)
575 self.Emit("ADDQ" , jit.Imm(1), _IC)
576 self.Link(_LB_char_0_error)
577 self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP)
578 self.Link(_LB_parsing_error)
579 self.Emit("MOVQ" , _EP, _DI)
580 self.Emit("MOVQ", _ARG_sp, _AX)
581 self.Emit("MOVQ", _ARG_sl, _BX)
582 self.Emit("MOVQ" , _IC, _CX)
583 self.call_go(_F_error_wrap)
584 self.Sjmp("JMP" , _LB_error)
585 }
586
587 func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
588 self.Emit("MOVQ", _IC, _VAR_ic)
589 self.Emit("MOVQ", jit.Type(p.vt()), _ET)
590 self.Emit("MOVQ", _ET, _VAR_et)
591 }
592
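// Mismatch handling: the raw bytes 0x4c, 0x8d, 0x0d encode `LEAQ rel32(PC), R9`,
// so together with Xref/Sref they materialize the address of the resume label.
// That address is stashed in _VAR_pc, the shared skip routines below consume
// the pending value, and control returns through an indirect JMP via R9.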
593 func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
594 self.Byte(0x4c, 0x8d, 0x0d)
595 self.Xref(p.vi(), 4)
596
597 self.Emit("MOVQ", _R9, _VAR_pc)
598 self.Sjmp("JMP" , _LB_skip_one)
599 }
600
601 func (self *_Assembler) skip_one() {
602 self.Link(_LB_skip_one)
603 self.Emit("MOVQ", _VAR_ic, _IC)
604 self.call_sf(_F_skip_one)
605 self.Emit("TESTQ", _AX, _AX)
606 self.Sjmp("JS" , _LB_parsing_error_v)
607 self.Emit("MOVQ" , _VAR_pc, _R9)
608
609 self.Rjmp("JMP" , _R9)
610 }
611
612 func (self *_Assembler) skip_key_value() {
613 self.Link(_LB_skip_key_value)
614
615 self.Emit("MOVQ", _VAR_ic, _IC)
616 self.call_sf(_F_skip_one)
617 self.Emit("TESTQ", _AX, _AX)
618 self.Sjmp("JS" , _LB_parsing_error_v)
619
620 self.lspace("_global_1")
621 self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
622 self.Sjmp("JNE" , _LB_parsing_error_v)
623 self.Emit("ADDQ", jit.Imm(1), _IC)
624 self.lspace("_global_2")
625
626 self.call_sf(_F_skip_one)
627 self.Emit("TESTQ", _AX, _AX)
628 self.Sjmp("JS" , _LB_parsing_error_v)
629
630 self.Emit("MOVQ" , _VAR_pc, _R9)
631 self.Rjmp("JMP" , _R9)
632 }
633
634
635
636
637 var (
638 _T_byte = jit.Type(byteType)
639 _F_mallocgc = jit.Func(mallocgc)
640 )
641
642 func (self *_Assembler) malloc_AX(nb obj.Addr, ret obj.Addr) {
643 self.Emit("MOVQ", nb, _AX)
644 self.Emit("MOVQ", _T_byte, _BX)
645 self.Emit("XORL", _CX, _CX)
646 self.call_go(_F_mallocgc)
647 self.Emit("MOVQ", _AX, ret)
648 }
649
650 func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
651 self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX)
652 self.Emit("MOVQ", jit.Type(vt), _BX)
653 self.Emit("MOVB", jit.Imm(1), _CX)
654 self.call_go(_F_mallocgc)
655 self.Emit("MOVQ", _AX, ret)
656 }
657
658 func (self *_Assembler) valloc_AX(vt reflect.Type) {
659 self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX)
660 self.Emit("MOVQ", jit.Type(vt), _BX)
661 self.Emit("MOVB", jit.Imm(1), _CX)
662 self.call_go(_F_mallocgc)
663 }
664
665 func (self *_Assembler) vfollow(vt reflect.Type) {
666 self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)
667 self.Emit("TESTQ", _AX, _AX)
668 self.Sjmp("JNZ" , "_end_{n}")
669 self.valloc_AX(vt)
670 self.WritePtrAX(1, jit.Ptr(_VP, 0), true)
671 self.Link("_end_{n}")
672 self.Emit("MOVQ" , _AX, _VP)
673 }
674
675
676
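// Primitive parsing: _F_vstring/_F_vnumber/_F_vsigned/_F_vunsigned are native
// subroutines that decode the value at the cursor into the _VAR_st state
// block. check_err inspects the resulting type code: on failure it either
// raises a parsing error or, when a concrete Go type was expected, records a
// mismatch in _VAR_et/_VAR_ic and skips the offending value so decoding can
// continue.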
677 var (
678 _F_vstring = jit.Imm(int64(native.S_vstring))
679 _F_vnumber = jit.Imm(int64(native.S_vnumber))
680 _F_vsigned = jit.Imm(int64(native.S_vsigned))
681 _F_vunsigned = jit.Imm(int64(native.S_vunsigned))
682 )
683
684 func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
685 self.Emit("MOVQ" , _VAR_st_Vt, _AX)
686 self.Emit("TESTQ", _AX, _AX)
687
688 if vt != nil {
689 self.Sjmp("JNS" , "_check_err_{n}")
690 self.Emit("MOVQ", jit.Type(vt), _ET)
691 self.Emit("MOVQ", _ET, _VAR_et)
692 if pin2 != -1 {
693 self.Emit("SUBQ", jit.Imm(1), _BX)
694 self.Emit("MOVQ", _BX, _VAR_ic)
695 self.Byte(0x4c , 0x8d, 0x0d)
696 self.Xref(pin2, 4)
697 self.Emit("MOVQ", _R9, _VAR_pc)
698 self.Sjmp("JMP" , _LB_skip_key_value)
699 } else {
700 self.Emit("MOVQ", _BX, _VAR_ic)
701 self.Byte(0x4c , 0x8d, 0x0d)
702 self.Sref(pin, 4)
703 self.Emit("MOVQ", _R9, _VAR_pc)
704 self.Sjmp("JMP" , _LB_skip_one)
705 }
706 self.Link("_check_err_{n}")
707 } else {
708 self.Sjmp("JS" , _LB_parsing_error_v)
709 }
710 }
711
712 func (self *_Assembler) check_eof(d int64) {
713 if d == 1 {
714 self.Emit("CMPQ", _IC, _IL)
715 self.Sjmp("JAE" , _LB_eof_error)
716 } else {
717 self.Emit("LEAQ", jit.Ptr(_IC, d), _AX)
718 self.Emit("CMPQ", _AX, _IL)
719 self.Sjmp("JA" , _LB_eof_error)
720 }
721 }
722
723
724 func (self *_Assembler) parse_string() {
725 self.Emit("MOVQ", _ARG_fv, _CX)
726 self.call_vf(_F_vstring)
727 self.check_err(nil, "", -1)
728 }
729
730 func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
731 self.Emit("MOVQ", _IC, _BX)
732 self.call_vf(_F_vnumber)
733 self.check_err(vt, pin, pin2)
734 }
735
736 func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
737 self.Emit("MOVQ", _IC, _BX)
738 self.call_vf(_F_vsigned)
739 self.check_err(vt, pin, pin2)
740 }
741
742 func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
743 self.Emit("MOVQ", _IC, _BX)
744 self.call_vf(_F_vunsigned)
745 self.check_err(vt, pin, pin2)
746 }
747
748
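// Shared string subroutines: copy_string duplicates the source slice so the
// result does not alias the input, while escape_string / escape_string_twice
// additionally run the native unquote to resolve escapes. All three receive
// the source pointer/length in DI/SI and a return address in R9, and resume
// the caller through an indirect JMP.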
749 func (self *_Assembler) copy_string() {
750 self.Link("_copy_string")
751 self.Emit("MOVQ", _DI, _VAR_bs_p)
752 self.Emit("MOVQ", _SI, _VAR_bs_n)
753 self.Emit("MOVQ", _R9, _VAR_bs_LR)
754 self.malloc_AX(_SI, _ARG_sv_p)
755 self.Emit("MOVQ", _VAR_bs_p, _BX)
756 self.Emit("MOVQ", _VAR_bs_n, _CX)
757 self.call_go(_F_memmove)
758 self.Emit("MOVQ", _ARG_sv_p, _DI)
759 self.Emit("MOVQ", _VAR_bs_n, _SI)
760 self.Emit("MOVQ", _VAR_bs_LR, _R9)
761 self.Rjmp("JMP", _R9)
762 }
763
764
765 func (self *_Assembler) escape_string() {
766 self.Link("_escape_string")
767 self.Emit("MOVQ" , _DI, _VAR_bs_p)
768 self.Emit("MOVQ" , _SI, _VAR_bs_n)
769 self.Emit("MOVQ" , _R9, _VAR_bs_LR)
770 self.malloc_AX(_SI, _DX)
771 self.Emit("MOVQ" , _DX, _ARG_sv_p)
772 self.Emit("MOVQ" , _VAR_bs_p, _DI)
773 self.Emit("MOVQ" , _VAR_bs_n, _SI)
774 self.Emit("LEAQ" , _VAR_sr, _CX)
775 self.Emit("XORL" , _R8, _R8)
776 self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv)
777 self.Emit("SETCC", _R8)
778 self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8)
779 self.call_c(_F_unquote)
780 self.Emit("MOVQ" , _VAR_bs_n, _SI)
781 self.Emit("ADDQ" , jit.Imm(1), _SI)
782 self.Emit("TESTQ", _AX, _AX)
783 self.Sjmp("JS" , _LB_unquote_error)
784 self.Emit("MOVQ" , _AX, _SI)
785 self.Emit("MOVQ" , _ARG_sv_p, _DI)
786 self.Emit("MOVQ" , _VAR_bs_LR, _R9)
787 self.Rjmp("JMP", _R9)
788 }
789
790 func (self *_Assembler) escape_string_twice() {
791 self.Link("_escape_string_twice")
792 self.Emit("MOVQ" , _DI, _VAR_bs_p)
793 self.Emit("MOVQ" , _SI, _VAR_bs_n)
794 self.Emit("MOVQ" , _R9, _VAR_bs_LR)
795 self.malloc_AX(_SI, _DX)
796 self.Emit("MOVQ" , _DX, _ARG_sv_p)
797 self.Emit("MOVQ" , _VAR_bs_p, _DI)
798 self.Emit("MOVQ" , _VAR_bs_n, _SI)
799 self.Emit("LEAQ" , _VAR_sr, _CX)
800 self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)
801 self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv)
802 self.Emit("XORL" , _AX, _AX)
803 self.Emit("SETCC", _AX)
804 self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX)
805 self.Emit("ORQ" , _AX, _R8)
806 self.call_c(_F_unquote)
807 self.Emit("MOVQ" , _VAR_bs_n, _SI)
808 self.Emit("ADDQ" , jit.Imm(3), _SI)
809 self.Emit("TESTQ", _AX, _AX)
810 self.Sjmp("JS" , _LB_unquote_error)
811 self.Emit("MOVQ" , _AX, _SI)
812 self.Emit("MOVQ" , _ARG_sv_p, _DI)
813 self.Emit("MOVQ" , _VAR_bs_LR, _R9)
814 self.Rjmp("JMP", _R9)
815 }
816
817
818
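// float32 range checking: numbers are parsed as float64 first, then
// range_single_X0 narrows them and rejects anything outside ±math.MaxFloat32
// through the _LB_range_error path.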
819 var (
820 _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
821 _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
822 )
823
824 var (
825 _Vp_max_f32 = new(float32)
826 _Vp_min_f32 = new(float32)
827 )
828
829 func init() {
830 *_Vp_max_f32 = math.MaxFloat32
831 *_Vp_min_f32 = -math.MaxFloat32
832 }
833
834 func (self *_Assembler) range_single_X0() {
835 self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)
836 self.Emit("MOVQ" , _V_max_f32, _CX)
837 self.Emit("MOVQ" , jit.Gitab(_I_float32), _ET)
838 self.Emit("MOVQ" , jit.Gtype(_T_float32), _EP)
839 self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)
840 self.Sjmp("JA" , _LB_range_error)
841 self.Emit("MOVQ" , _V_min_f32, _CX)
842 self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)
843 self.Sjmp("JB" , _LB_range_error)
844 }
845
846 func (self *_Assembler) range_signed_CX(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
847 self.Emit("MOVQ", _VAR_st_Iv, _CX)
848 self.Emit("MOVQ", jit.Gitab(i), _ET)
849 self.Emit("MOVQ", jit.Gtype(t), _EP)
850 self.Emit("CMPQ", _CX, jit.Imm(a))
851 self.Sjmp("JL" , _LB_range_error)
852 self.Emit("CMPQ", _CX, jit.Imm(b))
853 self.Sjmp("JG" , _LB_range_error)
854 }
855
856 func (self *_Assembler) range_unsigned_CX(i *rt.GoItab, t *rt.GoType, v uint64) {
857 self.Emit("MOVQ" , _VAR_st_Iv, _CX)
858 self.Emit("MOVQ" , jit.Gitab(i), _ET)
859 self.Emit("MOVQ" , jit.Gtype(t), _EP)
860 self.Emit("TESTQ", _CX, _CX)
861 self.Sjmp("JS" , _LB_range_error)
862 self.Emit("CMPQ" , _CX, jit.Imm(int64(v)))
863 self.Sjmp("JA" , _LB_range_error)
864 }
865
866
867
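// slice_from / slice_from_r turn the saved start index of a token into a
// (DI, SI) = (pointer, length) pair relative to the input buffer. The unquote
// helpers then either reference the input in place, copy it when the
// copy-string flag is set, or dispatch to the escape subroutines when the
// parser reported an escape position in _VAR_st_Ep.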
868 var (
869 _F_unquote = jit.Imm(int64(native.S_unquote))
870 )
871
872 func (self *_Assembler) slice_from(p obj.Addr, d int64) {
873 self.Emit("MOVQ", p, _SI)
874 self.slice_from_r(_SI, d)
875 }
876
877 func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
878 self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI)
879 self.Emit("NEGQ", p)
880 self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI)
881 }
882
883 func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
884 self.slice_from(_VAR_st_Iv, -1)
885 self.Emit("CMPQ", _VAR_st_Ep, jit.Imm(-1))
886 self.Sjmp("JE" , "_noescape_{n}")
887 self.Byte(0x4c, 0x8d, 0x0d)
888 self.Sref("_unquote_once_write_{n}", 4)
889 self.Sjmp("JMP" , "_escape_string")
890 self.Link("_noescape_{n}")
891 if copy {
892 self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
893 self.Sjmp("JNC", "_unquote_once_write_{n}")
894 self.Byte(0x4c, 0x8d, 0x0d)
895 self.Sref("_unquote_once_write_{n}", 4)
896 self.Sjmp("JMP", "_copy_string")
897 }
898 self.Link("_unquote_once_write_{n}")
899 self.Emit("MOVQ", _SI, n)
900 if stack {
901 self.Emit("MOVQ", _DI, p)
902 } else {
903 self.WriteRecNotAX(10, _DI, p, false, false)
904 }
905 }
906
907 func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
908 self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1))
909 self.Sjmp("JE" , _LB_eof_error)
910 self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\'))
911 self.Sjmp("JNE" , _LB_char_m3_error)
912 self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"'))
913 self.Sjmp("JNE" , _LB_char_m2_error)
914 self.slice_from(_VAR_st_Iv, -3)
915 self.Emit("MOVQ" , _SI, _AX)
916 self.Emit("ADDQ" , _VAR_st_Iv, _AX)
917 self.Emit("CMPQ" , _VAR_st_Ep, _AX)
918 self.Sjmp("JE" , "_noescape_{n}")
919 self.Byte(0x4c, 0x8d, 0x0d)
920 self.Sref("_unquote_twice_write_{n}", 4)
921 self.Sjmp("JMP" , "_escape_string_twice")
922 self.Link("_noescape_{n}")
923 self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
924 self.Sjmp("JNC", "_unquote_twice_write_{n}")
925 self.Byte(0x4c, 0x8d, 0x0d)
926 self.Sref("_unquote_twice_write_{n}", 4)
927 self.Sjmp("JMP", "_copy_string")
928 self.Link("_unquote_twice_write_{n}")
929 self.Emit("MOVQ" , _SI, n)
930 if stack {
931 self.Emit("MOVQ", _DI, p)
932 } else {
933 self.WriteRecNotAX(12, _DI, p, false, false)
934 }
935 self.Link("_unquote_twice_end_{n}")
936 }
937
938
939
940 var (
941 _F_memclrHasPointers = jit.Func(memclrHasPointers)
942 _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
943 )
944
945 func (self *_Assembler) mem_clear_fn(ptrfree bool) {
946 if !ptrfree {
947 self.call_go(_F_memclrHasPointers)
948 } else {
949 self.call_go(_F_memclrNoHeapPointers)
950 }
951 }
952
953 func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
954 self.Emit("MOVQ", jit.Imm(size), _BX)
955 self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
956 self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX)
957 self.Emit("SUBQ", _VP, _AX)
958 self.Emit("ADDQ", _AX, _BX)
959 self.Emit("MOVQ", _VP, _AX)
960 self.mem_clear_fn(ptrfree)
961 }
962
963
964
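// Map insertion: the wrappers below pick the runtime fast paths
// (mapassign_faststr / _fast32 / _fast64 / _fast64ptr) where mapfast allows
// it and fall back to the generic mapassign otherwise; mapaccess_ptr follows
// the returned slot when the map stores its elements indirectly.
// mapassign_utext first decodes the key through encoding.TextUnmarshaler
// into a freshly allocated key value.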
965 var (
966 _F_mapassign = jit.Func(mapassign)
967 _F_mapassign_fast32 = jit.Func(mapassign_fast32)
968 _F_mapassign_faststr = jit.Func(mapassign_faststr)
969 _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
970 )
971
972 var (
973 _F_decodeJsonUnmarshaler obj.Addr
974 _F_decodeTextUnmarshaler obj.Addr
975 )
976
977 func init() {
978 _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
979 _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
980 }
981
982 func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
983 if rt.MapType(rt.UnpackType(t)).IndirectElem() {
984 self.vfollow(t.Elem())
985 }
986 }
987
988 func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
989 self.Emit("LEAQ", v, _AX)
990 self.mapassign_call_from_AX(t, _F_mapassign)
991 }
992
993 func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
994 self.Emit("MOVQ", jit.Type(t), _AX)
995 self.Emit("MOVQ", _VP, _BX)
996 self.Emit("MOVQ", p, _CX)
997 self.Emit("MOVQ", n, _DI)
998 self.call_go(_F_mapassign_faststr)
999 self.Emit("MOVQ", _AX, _VP)
1000 self.mapaccess_ptr(t)
1001 }
1002
1003 func (self *_Assembler) mapassign_call_from_AX(t reflect.Type, fn obj.Addr) {
1004 self.Emit("MOVQ", _AX, _CX)
1005 self.Emit("MOVQ", jit.Type(t), _AX)
1006 self.Emit("MOVQ", _VP, _BX)
1007 self.call_go(fn)
1008 self.Emit("MOVQ", _AX, _VP)
1009 }
1010
1011 func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
1012 self.mapassign_call_from_AX(t, fn)
1013 self.mapaccess_ptr(t)
1014 }
1015
1016 func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
1017 pv := false
1018 vk := t.Key()
1019 tk := t.Key()
1020
1021
1022 if vk.Kind() == reflect.Ptr {
1023 pv = true
1024 vk = vk.Elem()
1025 }
1026
1027
1028 if addressable {
1029 pv = false
1030 tk = reflect.PtrTo(tk)
1031 }
1032
1033
1034 self.valloc(vk, _BX)
1035
1036 self.Emit("MOVQ" , _BX, _ARG_vk)
1037 self.Emit("MOVQ" , jit.Type(tk), _AX)
1038 self.Emit("MOVQ" , _ARG_sv_p, _CX)
1039 self.Emit("MOVQ" , _ARG_sv_n, _DI)
1040 self.call_go(_F_decodeTextUnmarshaler)
1041 self.Emit("TESTQ", _ET, _ET)
1042 self.Sjmp("JNZ" , _LB_error)
1043 self.Emit("MOVQ" , _ARG_vk, _AX)
1044 self.Emit("MOVQ", jit.Imm(0), _ARG_vk)
1045
1046
1047 if !pv {
1048 self.mapassign_call_from_AX(t, _F_mapassign)
1049 } else {
1050 self.mapassign_fastx(t, _F_mapassign_fast64ptr)
1051 }
1052 }
1053
1054
1055
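// Unmarshaler dispatch: unmarshal_json skips the raw value and passes the
// byte slice to decodeJsonUnmarshaler, while unmarshal_text parses and
// unquotes the string before calling decodeTextUnmarshaler; the deref flag
// controls whether a nil pointer receiver is allocated before the call.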
1056 var (
1057 _F_skip_one = jit.Imm(int64(native.S_skip_one))
1058 _F_skip_array = jit.Imm(int64(native.S_skip_array))
1059 _F_skip_object = jit.Imm(int64(native.S_skip_object))
1060 _F_skip_number = jit.Imm(int64(native.S_skip_number))
1061 )
1062
1063 func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
1064 self.call_sf(_F_skip_one)
1065 self.Emit("TESTQ", _AX, _AX)
1066 self.Sjmp("JS" , _LB_parsing_error_v)
1067 self.slice_from_r(_AX, 0)
1068 self.Emit("MOVQ" , _DI, _ARG_sv_p)
1069 self.Emit("MOVQ" , _SI, _ARG_sv_n)
1070 self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref)
1071 }
1072
1073 func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
1074 self.parse_string()
1075 self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)
1076 self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref)
1077 }
1078
1079 func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
1080 pt := t
1081 vk := t.Kind()
1082
1083
1084 if deref && vk == reflect.Ptr {
1085 self.Emit("MOVQ" , _VP, _BX)
1086 self.Emit("MOVQ" , jit.Ptr(_BX, 0), _BX)
1087 self.Emit("TESTQ", _BX, _BX)
1088 self.Sjmp("JNZ" , "_deref_{n}")
1089 self.valloc(t.Elem(), _BX)
1090 self.WriteRecNotAX(3, _BX, jit.Ptr(_VP, 0), false, false)
1091 self.Link("_deref_{n}")
1092 } else {
1093
1094 self.Emit("MOVQ", _VP, _BX)
1095 }
1096
1097
1098 self.Emit("MOVQ", jit.Type(pt), _AX)
1099
1100
1101 self.Emit("MOVQ" , _ARG_sv_p, _CX)
1102 self.Emit("MOVQ" , _ARG_sv_n, _DI)
1103 self.call_go(fn)
1104 self.Emit("TESTQ", _ET, _ET)
1105 self.Sjmp("JNZ" , _LB_error)
1106 }
1107
1108
1109
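// decode_dynamic recurses through decodeTypedPointer to decode into a value
// whose concrete type is only known at run time (interface{} values and
// recursive types). All pinned registers are saved around the call, and a
// MismatchTypeError is recorded in _VAR_et/_VAR_ic instead of aborting so
// that the remaining input can still be decoded.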
1110 var (
1111 _F_decodeTypedPointer obj.Addr
1112 )
1113
1114 func init() {
1115 _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
1116 }
1117
1118 func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
1119 self.Emit("MOVQ" , vp, _SI)
1120 self.Emit("MOVQ" , vt, _DI)
1121 self.Emit("MOVQ", _ARG_sp, _AX)
1122 self.Emit("MOVQ", _ARG_sl, _BX)
1123 self.Emit("MOVQ" , _IC, _CX)
1124 self.Emit("MOVQ" , _ST, _R8)
1125 self.Emit("MOVQ" , _ARG_fv, _R9)
1126 self.save(_REG_rt...)
1127 self.Emit("MOVQ", _F_decodeTypedPointer, _IL)
1128 self.Rjmp("CALL", _IL)
1129 self.load(_REG_rt...)
1130 self.Emit("MOVQ" , _AX, _IC)
1131 self.Emit("MOVQ" , _BX, _ET)
1132 self.Emit("MOVQ" , _CX, _EP)
1133 self.Emit("TESTQ", _ET, _ET)
1134 self.Sjmp("JE", "_decode_dynamic_end_{n}")
1135 self.Emit("MOVQ", _I_json_MismatchTypeError, _CX)
1136 self.Emit("CMPQ", _ET, _CX)
1137 self.Sjmp("JNE", _LB_error)
1138 self.Emit("MOVQ", _EP, _VAR_ic)
1139 self.Emit("MOVQ", _ET, _VAR_et)
1140 self.Link("_decode_dynamic_end_{n}")
1141 }
1142
1143
1144
1145 var (
1146 _F_memequal = jit.Func(memequal)
1147 _F_memmove = jit.Func(memmove)
1148 _F_growslice = jit.Func(growslice)
1149 _F_makeslice = jit.Func(makeslice)
1150 _F_makemap_small = jit.Func(makemap_small)
1151 _F_mapassign_fast64 = jit.Func(mapassign_fast64)
1152 )
1153
1154 var (
1155 _F_lspace = jit.Imm(int64(native.S_lspace))
1156 _F_strhash = jit.Imm(int64(caching.S_strhash))
1157 )
1158
1159 var (
1160 _F_b64decode = jit.Imm(int64(_subr__b64decode))
1161 _F_decodeValue = jit.Imm(int64(_subr_decode_value))
1162 )
1163
1164 var (
1165 _F_FieldMap_GetCaseInsensitive obj.Addr
1166 _Empty_Slice = []byte{}
1167 _Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
1168 )
1169
1170 const (
1171 _MODE_AVX2 = 1 << 2
1172 )
1173
1174 const (
1175 _Fe_ID = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
1176 _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
1177 _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
1178 )
1179
1180 const (
1181 _Vk_Ptr = int64(reflect.Ptr)
1182 _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
1183 )
1184
1185 func init() {
1186 _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
1187 }
1188
1189 func (self *_Assembler) _asm_OP_any(_ *_Instr) {
1190 self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX)
1191 self.Emit("TESTQ" , _CX, _CX)
1192 self.Sjmp("JZ" , "_decode_{n}")
1193 self.Emit("CMPQ" , _CX, _VP)
1194 self.Sjmp("JE" , "_decode_{n}")
1195 self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)
1196 self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)
1197 self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX)
1198 self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr))
1199 self.Sjmp("JNE" , "_decode_{n}")
1200 self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI)
1201 self.decode_dynamic(_AX, _DI)
1202 self.Sjmp("JMP" , "_decode_end_{n}")
1203 self.Link("_decode_{n}")
1204 self.Emit("MOVQ" , _ARG_fv, _DF)
1205 self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 0))
1206 self.call(_F_decodeValue)
1207 self.Emit("MOVQ" , jit.Imm(0), jit.Ptr(_SP, 0))
1208 self.Emit("TESTQ" , _EP, _EP)
1209 self.Sjmp("JNZ" , _LB_parsing_error)
1210 self.Link("_decode_end_{n}")
1211 }
1212
1213 func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
1214 self.Emit("MOVQ" , jit.Type(p.vt()), _ET)
1215 self.Emit("CMPQ" , jit.Ptr(_VP, 8), jit.Imm(0))
1216 self.Sjmp("JE" , _LB_type_error)
1217 self.Emit("MOVQ" , jit.Ptr(_VP, 0), _CX)
1218 self.Emit("MOVQ" , jit.Ptr(_CX, 8), _CX)
1219 self.Emit("MOVBLZX", jit.Ptr(_CX, _Gt_KindFlags), _DX)
1220 self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX)
1221 self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr))
1222 self.Sjmp("JNE" , _LB_type_error)
1223 self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI)
1224 self.decode_dynamic(_CX, _DI)
1225 self.Link("_decode_end_{n}")
1226 }
1227
1228 func (self *_Assembler) _asm_OP_str(_ *_Instr) {
1229 self.parse_string()
1230 self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true)
1231 }
1232
1233 func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
1234 self.parse_string()
1235 self.slice_from(_VAR_st_Iv, -1)
1236 self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0))
1237 self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8))
1238 self.Emit("SHRQ" , jit.Imm(2), _SI)
1239 self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI)
1240 self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))
1241 self.malloc_AX(_SI, _SI)
1242
1243
1244 self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX)
1245
1246
1247 self.Emit("XORL" , _DX, _DX)
1248 self.Emit("MOVQ" , _VP, _DI)
1249
1250 self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R8)
1251 self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false)
1252 self.Emit("MOVQ" , _R8, _SI)
1253
1254 self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8))
1255 self.call_c(_F_b64decode)
1256 self.Emit("TESTQ", _AX, _AX)
1257 self.Sjmp("JS" , _LB_base64_error)
1258 self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))
1259 }
1260
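// _asm_OP_bool matches the literals "true" / "false" with 4-byte compares
// against _IM_true and _IM_alse (the leading 'f' is checked separately); any
// other token records a bool type mismatch and skips the value.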
1261 func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
1262 self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)
1263 self.Emit("CMPQ", _AX, _IL)
1264 self.Sjmp("JA" , _LB_eof_error)
1265 self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f'))
1266 self.Sjmp("JE" , "_false_{n}")
1267 self.Emit("MOVL", jit.Imm(_IM_true), _CX)
1268 self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))
1269 self.Sjmp("JE" , "_bool_true_{n}")
1270
1271 self.Emit("MOVQ", _IC, _VAR_ic)
1272 self.Emit("MOVQ", _T_bool, _ET)
1273 self.Emit("MOVQ", _ET, _VAR_et)
1274 self.Byte(0x4c, 0x8d, 0x0d)
1275 self.Sref("_end_{n}", 4)
1276 self.Emit("MOVQ", _R9, _VAR_pc)
1277 self.Sjmp("JMP" , _LB_skip_one)
1278
1279 self.Link("_bool_true_{n}")
1280 self.Emit("MOVQ", _AX, _IC)
1281 self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0))
1282 self.Sjmp("JMP" , "_end_{n}")
1283 self.Link("_false_{n}")
1284 self.Emit("ADDQ", jit.Imm(1), _AX)
1285 self.Emit("ADDQ", jit.Imm(1), _IC)
1286 self.Emit("CMPQ", _AX, _IL)
1287 self.Sjmp("JA" , _LB_eof_error)
1288 self.Emit("MOVL", jit.Imm(_IM_alse), _CX)
1289 self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))
1290 self.Sjmp("JNE" , _LB_im_error)
1291 self.Emit("MOVQ", _AX, _IC)
1292 self.Emit("XORL", _AX, _AX)
1293 self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))
1294 self.Link("_end_{n}")
1295 }
1296
1297 func (self *_Assembler) _asm_OP_num(_ *_Instr) {
1298 self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
1299 self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
1300 self.Emit("MOVQ", _IC, _BX)
1301 self.Sjmp("JNE", "_skip_number_{n}")
1302 self.Emit("MOVQ", jit.Imm(1), _VAR_fl)
1303 self.Emit("ADDQ", jit.Imm(1), _IC)
1304 self.Link("_skip_number_{n}")
1305
1306
1307 self.Emit("LEAQ", _ARG_s, _DI)
1308 self.Emit("MOVQ", _IC, _ARG_ic)
1309 self.Emit("LEAQ", _ARG_ic, _SI)
1310 self.callc(_F_skip_number)
1311 self.Emit("MOVQ", _ARG_ic, _IC)
1312 self.Emit("TESTQ", _AX, _AX)
1313 self.Sjmp("JNS" , "_num_next_{n}")
1314
1315
1316 self.Emit("MOVQ", _BX, _VAR_ic)
1317 self.Emit("MOVQ", _T_number, _ET)
1318 self.Emit("MOVQ", _ET, _VAR_et)
1319 self.Byte(0x4c, 0x8d, 0x0d)
1320 self.Sref("_num_end_{n}", 4)
1321 self.Emit("MOVQ", _R9, _VAR_pc)
1322 self.Sjmp("JMP" , _LB_skip_one)
1323
1324
1325 self.Link("_num_next_{n}")
1326 self.slice_from_r(_AX, 0)
1327 self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
1328 self.Sjmp("JNC", "_num_write_{n}")
1329 self.Byte(0x4c, 0x8d, 0x0d)
1330 self.Sref("_num_write_{n}", 4)
1331 self.Sjmp("JMP", "_copy_string")
1332 self.Link("_num_write_{n}")
1333 self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8))
1334 self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)
1335 self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
1336 self.Sjmp("JNE", "_num_end_{n}")
1337 self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
1338 self.Sjmp("JNE", _LB_char_0_error)
1339 self.Emit("ADDQ", jit.Imm(1), _IC)
1340 self.Link("_num_end_{n}")
1341 }
1342
1343 func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
1344 var pin = "_i8_end_{n}"
1345 self.parse_signed(int8Type, pin, -1)
1346 self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)
1347 self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))
1348 self.Link(pin)
1349 }
1350
1351 func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
1352 var pin = "_i16_end_{n}"
1353 self.parse_signed(int16Type, pin, -1)
1354 self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)
1355 self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))
1356 self.Link(pin)
1357 }
1358
1359 func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
1360 var pin = "_i32_end_{n}"
1361 self.parse_signed(int32Type, pin, -1)
1362 self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)
1363 self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))
1364 self.Link(pin)
1365 }
1366
1367 func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
1368 var pin = "_i64_end_{n}"
1369 self.parse_signed(int64Type, pin, -1)
1370 self.Emit("MOVQ", _VAR_st_Iv, _AX)
1371 self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))
1372 self.Link(pin)
1373 }
1374
1375 func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
1376 var pin = "_u8_end_{n}"
1377 self.parse_unsigned(uint8Type, pin, -1)
1378 self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8)
1379 self.Emit("MOVB", _CX, jit.Ptr(_VP, 0))
1380 self.Link(pin)
1381 }
1382
1383 func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
1384 var pin = "_u16_end_{n}"
1385 self.parse_unsigned(uint16Type, pin, -1)
1386 self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16)
1387 self.Emit("MOVW", _CX, jit.Ptr(_VP, 0))
1388 self.Link(pin)
1389 }
1390
1391 func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
1392 var pin = "_u32_end_{n}"
1393 self.parse_unsigned(uint32Type, pin, -1)
1394 self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32)
1395 self.Emit("MOVL", _CX, jit.Ptr(_VP, 0))
1396 self.Link(pin)
1397 }
1398
1399 func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
1400 var pin = "_u64_end_{n}"
1401 self.parse_unsigned(uint64Type, pin, -1)
1402 self.Emit("MOVQ", _VAR_st_Iv, _AX)
1403 self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))
1404 self.Link(pin)
1405 }
1406
1407 func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
1408 var pin = "_f32_end_{n}"
1409 self.parse_number(float32Type, pin, -1)
1410 self.range_single_X0()
1411 self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0))
1412 self.Link(pin)
1413 }
1414
1415 func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
1416 var pin = "_f64_end_{n}"
1417 self.parse_number(float64Type, pin, -1)
1418 self.Emit("MOVSD", _VAR_st_Dv, _X0)
1419 self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0))
1420 self.Link(pin)
1421 }
1422
1423 func (self *_Assembler) _asm_OP_unquote(_ *_Instr) {
1424 self.check_eof(2)
1425 self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\'))
1426 self.Sjmp("JNE" , _LB_char_0_error)
1427 self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"'))
1428 self.Sjmp("JNE" , _LB_char_1_error)
1429 self.Emit("ADDQ", jit.Imm(2), _IC)
1430 self.parse_string()
1431 self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false)
1432 }
1433
1434 func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
1435 self.Emit("XORL", _AX, _AX)
1436 self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))
1437 }
1438
1439 func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
1440 self.Emit("PXOR" , _X0, _X0)
1441 self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))
1442 }
1443
1444 func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
1445 self.Emit("XORL" , _AX, _AX)
1446 self.Emit("PXOR" , _X0, _X0)
1447 self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))
1448 self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16))
1449 }
1450
1451 func (self *_Assembler) _asm_OP_deref(p *_Instr) {
1452 self.vfollow(p.vt())
1453 }
1454
1455 func (self *_Assembler) _asm_OP_index(p *_Instr) {
1456 self.Emit("MOVQ", jit.Imm(p.i64()), _AX)
1457 self.Emit("ADDQ", _AX, _VP)
1458 }
1459
1460 func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
1461 self.Emit("LEAQ" , jit.Ptr(_IC, 4), _AX)
1462 self.Emit("CMPQ" , _AX, _IL)
1463 self.Sjmp("JA" , "_not_null_{n}")
1464 self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))
1465 self.Emit("CMOVQEQ", _AX, _IC)
1466 self.Xjmp("JE" , p.vi())
1467 self.Link("_not_null_{n}")
1468 }
1469
1470 func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
1471 self.Emit("LEAQ" , jit.Ptr(_IC, 5), _AX)
1472 self.Emit("CMPQ" , _AX, _IL)
1473 self.Sjmp("JA" , "_not_null_quote_{n}")
1474 self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))
1475 self.Sjmp("JNE" , "_not_null_quote_{n}")
1476 self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"'))
1477 self.Emit("CMOVQEQ", _AX, _IC)
1478 self.Xjmp("JE" , p.vi())
1479 self.Link("_not_null_quote_{n}")
1480 }
1481
1482 func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
1483 self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)
1484 self.Emit("TESTQ", _AX, _AX)
1485 self.Sjmp("JNZ" , "_end_{n}")
1486 self.call_go(_F_makemap_small)
1487 self.WritePtrAX(6, jit.Ptr(_VP, 0), false)
1488 self.Link("_end_{n}")
1489 self.Emit("MOVQ" , _AX, _VP)
1490 }
1491
1492 func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
1493 self.parse_signed(int8Type, "", p.vi())
1494 self.range_signed_CX(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)
1495 self.match_char('"')
1496 self.mapassign_std(p.vt(), _VAR_st_Iv)
1497 }
1498
1499 func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
1500 self.parse_signed(int16Type, "", p.vi())
1501 self.range_signed_CX(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)
1502 self.match_char('"')
1503 self.mapassign_std(p.vt(), _VAR_st_Iv)
1504 }
1505
1506 func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
1507 self.parse_signed(int32Type, "", p.vi())
1508 self.range_signed_CX(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)
1509 self.match_char('"')
1510 if vt := p.vt(); !mapfast(vt) {
1511 self.mapassign_std(vt, _VAR_st_Iv)
1512 } else {
1513 self.Emit("MOVQ", _CX, _AX)
1514 self.mapassign_fastx(vt, _F_mapassign_fast32)
1515 }
1516 }
1517
1518 func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
1519 self.parse_signed(int64Type, "", p.vi())
1520 self.match_char('"')
1521 if vt := p.vt(); !mapfast(vt) {
1522 self.mapassign_std(vt, _VAR_st_Iv)
1523 } else {
1524 self.Emit("MOVQ", _VAR_st_Iv, _AX)
1525 self.mapassign_fastx(vt, _F_mapassign_fast64)
1526 }
1527 }
1528
1529 func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
1530 self.parse_unsigned(uint8Type, "", p.vi())
1531 self.range_unsigned_CX(_I_uint8, _T_uint8, math.MaxUint8)
1532 self.match_char('"')
1533 self.mapassign_std(p.vt(), _VAR_st_Iv)
1534 }
1535
1536 func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
1537 self.parse_unsigned(uint16Type, "", p.vi())
1538 self.range_unsigned_CX(_I_uint16, _T_uint16, math.MaxUint16)
1539 self.match_char('"')
1540 self.mapassign_std(p.vt(), _VAR_st_Iv)
1541 }
1542
1543 func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
1544 self.parse_unsigned(uint32Type, "", p.vi())
1545 self.range_unsigned_CX(_I_uint32, _T_uint32, math.MaxUint32)
1546 self.match_char('"')
1547 if vt := p.vt(); !mapfast(vt) {
1548 self.mapassign_std(vt, _VAR_st_Iv)
1549 } else {
1550 self.Emit("MOVQ", _CX, _AX)
1551 self.mapassign_fastx(vt, _F_mapassign_fast32)
1552 }
1553 }
1554
1555 func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
1556 self.parse_unsigned(uint64Type, "", p.vi())
1557 self.match_char('"')
1558 if vt := p.vt(); !mapfast(vt) {
1559 self.mapassign_std(vt, _VAR_st_Iv)
1560 } else {
1561 self.Emit("MOVQ", _VAR_st_Iv, _AX)
1562 self.mapassign_fastx(vt, _F_mapassign_fast64)
1563 }
1564 }
1565
1566 func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
1567 self.parse_number(float32Type, "", p.vi())
1568 self.range_single_X0()
1569 self.Emit("MOVSS", _X0, _VAR_st_Dv)
1570 self.match_char('"')
1571 self.mapassign_std(p.vt(), _VAR_st_Dv)
1572 }
1573
1574 func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
1575 self.parse_number(float64Type, "", p.vi())
1576 self.match_char('"')
1577 self.mapassign_std(p.vt(), _VAR_st_Dv)
1578 }
1579
1580 func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
1581 self.parse_string()
1582 self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)
1583 if vt := p.vt(); !mapfast(vt) {
1584 self.valloc(vt.Key(), _DI)
1585 self.Emit("MOVOU", _ARG_sv, _X0)
1586 self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
1587 self.mapassign_std(vt, jit.Ptr(_DI, 0))
1588 } else {
1589 self.mapassign_str_fast(vt, _ARG_sv_p, _ARG_sv_n)
1590 }
1591 }
1592
1593 func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
1594 self.parse_string()
1595 self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)
1596 self.mapassign_utext(p.vt(), false)
1597 }
1598
1599 func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
1600 self.parse_string()
1601 self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, true)
1602 self.mapassign_utext(p.vt(), true)
1603 }
1604
1605 func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
1606 self.call_sf(_F_skip_array)
1607 self.Emit("TESTQ", _AX, _AX)
1608 self.Sjmp("JS" , _LB_parsing_error_v)
1609 }
1610
1611 func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
1612 self.mem_clear_rem(p.i64(), true)
1613 }
1614
1615 func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
1616 self.mem_clear_rem(p.i64(), false)
1617 }
1618
1619 func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
1620 self.Emit("XORL" , _AX, _AX)
1621 self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))
1622 self.Emit("MOVQ" , jit.Ptr(_VP, 16), _BX)
1623 self.Emit("TESTQ", _BX, _BX)
1624 self.Sjmp("JNZ" , "_done_{n}")
1625 self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX)
1626 self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))
1627 self.Emit("MOVQ" , jit.Type(p.vt()), _AX)
1628 self.call_go(_F_makeslice)
1629 self.WritePtrAX(7, jit.Ptr(_VP, 0), false)
1630 self.Emit("XORL" , _AX, _AX)
1631 self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))
1632 self.Link("_done_{n}")
1633 }
1634
1635 func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
1636 rbracket := p.vb()
1637 if rbracket == ']' {
1638 self.check_eof(1)
1639 self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)
1640 self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket)))
1641 self.Sjmp("JNE" , "_not_empty_array_{n}")
1642 self.Emit("MOVQ", _AX, _IC)
1643 self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
1644 self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
1645 self.Emit("PXOR", _X0, _X0)
1646 self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8))
1647 self.Xjmp("JMP" , p.vi())
1648 self.Link("_not_empty_array_{n}")
1649 } else {
1650 panic("only implement check empty array here!")
1651 }
1652 }
1653
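// _asm_OP_slice_append appends one element: when len == cap the backing
// array is grown via growslice, and for element types without pointers the
// unused capacity from the appended slot to the end of the new backing array
// is zeroed before the element slot is selected.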
1654 func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
1655 self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX)
1656 self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16))
1657 self.Sjmp("JB" , "_index_{n}")
1658 self.Emit("MOVQ" , _AX, _SI)
1659 self.Emit("SHLQ" , jit.Imm(1), _SI)
1660 self.Emit("MOVQ" , jit.Type(p.vt()), _AX)
1661 self.Emit("MOVQ" , jit.Ptr(_VP, 0), _BX)
1662 self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX)
1663 self.Emit("MOVQ" , jit.Ptr(_VP, 16), _DI)
1664 self.call_go(_F_growslice)
1665 self.WritePtrAX(8, jit.Ptr(_VP, 0), false)
1666 self.Emit("MOVQ" , _BX, jit.Ptr(_VP, 8))
1667 self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))
1668
1669
1670
1671 if rt.UnpackType(p.vt()).PtrData == 0 {
1672 self.Emit("MOVQ" , _CX, _DI)
1673 self.Emit("SUBQ" , _BX, _DI)
1674
1675 self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))
1676 self.Emit("MOVQ" , _AX, _VP)
1677 self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)
1678 self.Emit("MOVQ" , _BX, _AX)
1679 self.From("MULQ" , _CX)
1680 self.Emit("ADDQ" , _AX, _VP)
1681
1682 self.Emit("MOVQ" , _DI, _AX)
1683 self.From("MULQ" , _CX)
1684 self.Emit("MOVQ" , _AX, _BX)
1685 self.Emit("MOVQ" , _VP, _AX)
1686 self.mem_clear_fn(true)
1687 self.Sjmp("JMP", "_append_slice_end_{n}")
1688 }
1689
1690 self.Emit("MOVQ" , _BX, _AX)
1691 self.Link("_index_{n}")
1692 self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))
1693 self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP)
1694 self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)
1695 self.From("MULQ" , _CX)
1696 self.Emit("ADDQ" , _AX, _VP)
1697 self.Link("_append_slice_end_{n}")
1698 }
1699
1700 func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
1701 self.call_sf(_F_skip_object)
1702 self.Emit("TESTQ", _AX, _AX)
1703 self.Sjmp("JS" , _LB_parsing_error_v)
1704 }
1705
1706 func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
1707 self.call_sf(_F_skip_one)
1708 self.Emit("TESTQ", _AX, _AX)
1709 self.Sjmp("JS" , _LB_parsing_error_v)
1710 }
1711
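// _asm_OP_struct_field resolves a JSON key to a field index: the unquoted key
// is hashed with caching.strhash and looked up in the frozen FieldMap by
// probing its 32-byte entries (hash compare first, then memequal on the
// name). On a miss the case-insensitive lookup is tried, and the resulting
// index, or -1, is left in _VAR_sr for the _OP_switch that follows; unknown
// fields only raise an error when the disable-unknown flag is set.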
1712 func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
1713 assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
1714 self.Emit("MOVQ" , jit.Imm(-1), _AX)
1715 self.Emit("MOVQ" , _AX, _VAR_sr)
1716 self.parse_string()
1717 self.unquote_once(_ARG_sv_p, _ARG_sv_n, true, false)
1718 self.Emit("LEAQ" , _ARG_sv, _AX)
1719 self.Emit("XORL" , _BX, _BX)
1720 self.call_go(_F_strhash)
1721 self.Emit("MOVQ" , _AX, _R9)
1722 self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX)
1723 self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI)
1724 self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX)
1725 self.Emit("TESTQ", _CX, _CX)
1726 self.Sjmp("JZ" , "_try_lowercase_{n}")
1727 self.Link("_loop_{n}")
1728 self.Emit("XORL" , _DX, _DX)
1729 self.From("DIVQ" , _CX)
1730 self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX)
1731 self.Emit("SHLQ" , jit.Imm(5), _DX)
1732 self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI)
1733 self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8)
1734 self.Emit("TESTQ", _R8, _R8)
1735 self.Sjmp("JZ" , "_try_lowercase_{n}")
1736 self.Emit("CMPQ" , _R8, _R9)
1737 self.Sjmp("JNE" , "_loop_{n}")
1738 self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX)
1739 self.Emit("CMPQ" , _DX, _ARG_sv_n)
1740 self.Sjmp("JNE" , "_loop_{n}")
1741 self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8)
1742 self.Emit("MOVQ" , _AX, _VAR_ss_AX)
1743 self.Emit("MOVQ" , _CX, _VAR_ss_CX)
1744 self.Emit("MOVQ" , _SI, _VAR_ss_SI)
1745 self.Emit("MOVQ" , _R8, _VAR_ss_R8)
1746 self.Emit("MOVQ" , _R9, _VAR_ss_R9)
1747 self.Emit("MOVQ" , _ARG_sv_p, _AX)
1748 self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX)
1749 self.Emit("MOVQ" , _CX, _BX)
1750 self.Emit("MOVQ" , _DX, _CX)
1751 self.call_go(_F_memequal)
1752 self.Emit("MOVB" , _AX, _DX)
1753 self.Emit("MOVQ" , _VAR_ss_AX, _AX)
1754 self.Emit("MOVQ" , _VAR_ss_CX, _CX)
1755 self.Emit("MOVQ" , _VAR_ss_SI, _SI)
1756 self.Emit("MOVQ" , _VAR_ss_R9, _R9)
1757 self.Emit("TESTB", _DX, _DX)
1758 self.Sjmp("JZ" , "_loop_{n}")
1759 self.Emit("MOVQ" , _VAR_ss_R8, _R8)
1760 self.Emit("MOVQ" , _R8, _VAR_sr)
1761 self.Sjmp("JMP" , "_end_{n}")
1762 self.Link("_try_lowercase_{n}")
1763 self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX)
1764 self.Emit("MOVQ", _ARG_sv_p, _BX)
1765 self.Emit("MOVQ", _ARG_sv_n, _CX)
1766 self.call_go(_F_FieldMap_GetCaseInsensitive)
1767 self.Emit("MOVQ" , _AX, _VAR_sr)
1768 self.Emit("TESTQ", _AX, _AX)
1769 self.Sjmp("JNS" , "_end_{n}")
1770 self.Emit("BTQ" , jit.Imm(_F_disable_unknown), _ARG_fv)
1771 self.Sjmp("JC" , _LB_field_error)
1772 self.Link("_end_{n}")
1773 }
1774
1775 func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
1776 self.unmarshal_json(p.vt(), true)
1777 }
1778
1779 func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
1780 self.unmarshal_json(p.vt(), false)
1781 }
1782
1783 func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
1784 self.unmarshal_text(p.vt(), true)
1785 }
1786
1787 func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
1788 self.unmarshal_text(p.vt(), false)
1789 }
1790
1791 func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
1792 self.lspace("_{n}")
1793 }
1794
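// lspace skips insignificant whitespace: up to four bytes are tested inline
// against the _BM_space bitmap before falling back to the native lspace
// subroutine; running off the end of the input raises an EOF error.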
 1795 func (self *_Assembler) lspace(suffix string) {
 1796 var label = "_lspace" + suffix
1797 self.Emit("CMPQ" , _IC, _IL)
1798 self.Sjmp("JAE" , _LB_eof_error)
1799 self.Emit("MOVQ" , jit.Imm(_BM_space), _DX)
1800 self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)
1801 self.Emit("CMPQ" , _AX, jit.Imm(' '))
1802 self.Sjmp("JA" , label)
1803 self.Emit("BTQ" , _AX, _DX)
1804 self.Sjmp("JNC" , label)
1805
1806
1807 for i := 0; i < 3; i++ {
1808 self.Emit("ADDQ" , jit.Imm(1), _IC)
1809 self.Emit("CMPQ" , _IC, _IL)
1810 self.Sjmp("JAE" , _LB_eof_error)
1811 self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)
1812 self.Emit("CMPQ" , _AX, jit.Imm(' '))
1813 self.Sjmp("JA" , label)
1814 self.Emit("BTQ" , _AX, _DX)
1815 self.Sjmp("JNC" , label)
1816 }
1817
1818
1819 self.Emit("MOVQ" , _IP, _DI)
1820 self.Emit("MOVQ" , _IL, _SI)
1821 self.Emit("MOVQ" , _IC, _DX)
1822 self.callc(_F_lspace)
1823 self.Emit("TESTQ" , _AX, _AX)
1824 self.Sjmp("JS" , _LB_parsing_error_v)
1825 self.Emit("CMPQ" , _AX, _IL)
1826 self.Sjmp("JAE" , _LB_eof_error)
1827 self.Emit("MOVQ" , _AX, _IC)
1828 self.Link(label)
1829 }
1830
1831 func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
1832 self.match_char(p.vb())
1833 }
1834
1835 func (self *_Assembler) match_char(char byte) {
1836 self.check_eof(1)
1837 self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char)))
1838 self.Sjmp("JNE" , _LB_char_0_error)
1839 self.Emit("ADDQ", jit.Imm(1), _IC)
1840 }
1841
1842 func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
1843 self.check_eof(1)
1844 self.Emit("LEAQ" , jit.Ptr(_IC, 1), _AX)
1845 self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))
1846 self.Emit("CMOVQEQ", _AX, _IC)
1847 self.Xjmp("JE" , p.vi())
1848 }
1849
1850 func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
1851 self.check_eof(1)
1852 self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))
1853 self.Xjmp("JE" , p.vi())
1854 }
1855
1856 func (self *_Assembler) _asm_OP_add(p *_Instr) {
1857 self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC)
1858 }
1859
1860 func (self *_Assembler) _asm_OP_load(_ *_Instr) {
1861 self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
1862 self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP)
1863 }
1864
1865 func (self *_Assembler) _asm_OP_save(_ *_Instr) {
1866 self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)
1867 self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes))
1868 self.Sjmp("JAE" , _LB_stack_error)
1869 self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false)
1870 self.Emit("ADDQ", jit.Imm(8), _CX)
1871 self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0))
1872 }
1873
1874 func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
1875 self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
1876 self.Emit("SUBQ", jit.Imm(8), _AX)
1877 self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP)
1878 self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))
1879 self.Emit("XORL", _BX, _BX)
1880 self.Emit("MOVQ", _BX, jit.Sib(_ST, _AX, 1, 8))
1881 }
1882
1883 func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
1884 self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)
1885 self.Emit("SUBQ" , jit.Imm(16), _AX)
1886 self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP)
1887 self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))
1888 self.Emit("PXOR" , _X0, _X0)
1889 self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))
1890 }
1891
1892 func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
1893 self.Emit("MOVQ", jit.Type(p.vt()), _AX)
1894 self.decode_dynamic(_AX, _VP)
1895 }
1896
1897 func (self *_Assembler) _asm_OP_goto(p *_Instr) {
1898 self.Xjmp("JMP", p.vi())
1899 }
1900
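// _asm_OP_switch emits a jump table: 0x48, 0x8d, 0x3d is `LEAQ rel32(PC), DI`
// pointing at the table of signed 32-bit offsets emitted right after it; the
// field index from _VAR_sr selects an entry, the offset is added to the table
// base and jumped to, and out-of-range indices fall through to the default
// branch.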
1901 func (self *_Assembler) _asm_OP_switch(p *_Instr) {
1902 self.Emit("MOVQ", _VAR_sr, _AX)
1903 self.Emit("CMPQ", _AX, jit.Imm(p.i64()))
1904 self.Sjmp("JAE" , "_default_{n}")
1905
1906
1907 self.Byte(0x48, 0x8d, 0x3d)
1908 self.Sref("_switch_table_{n}", 4)
1909 self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX)
1910 self.Emit("ADDQ" , _DI, _AX)
1911 self.Rjmp("JMP" , _AX)
1912 self.Link("_switch_table_{n}")
1913
1914
1915 for i, v := range p.vs() {
1916 self.Xref(v, int64(-i) * 4)
1917 }
1918
1919
1920 self.Link("_default_{n}")
1921 self.NOP()
1922 }
1923
1924 func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
1925 self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX)
1926 self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX)
1927 self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
1928 self.call_go(_F_println)
1929 }
1930