1
2
3
18
19 package decoder
20
21 import (
22 `encoding/json`
23 `fmt`
24 `math`
25 `reflect`
26 `unsafe`
27
28 `github.com/bytedance/sonic/internal/caching`
29 `github.com/bytedance/sonic/internal/jit`
30 `github.com/bytedance/sonic/internal/native`
31 `github.com/bytedance/sonic/internal/native/types`
32 `github.com/bytedance/sonic/internal/rt`
33 `github.com/twitchyliquid64/golang-asm/obj`
34 )
35
36
51
52
66
// Stack-frame budget for every compiled decoder. The frame is split into an
// outgoing-argument area (_FP_fargs), a register save area used by
// save()/load() (_FP_saves) and a local-slot area (_FP_locals); _FP_args
// sizes the incoming argument/return area addressed via _FP_base offsets.
const (
    _FP_args = 96
    _FP_fargs = 80
    _FP_saves = 40
    _FP_locals = 144
)

// Derived frame offsets.
const (
    _FP_offs = _FP_fargs + _FP_saves + _FP_locals // saved frame pointer lives here (see prologue/epilogue)
    _FP_size = _FP_offs + 8 // total frame size: offs area + 8 bytes for saved BP
    _FP_base = _FP_size + 8 // incoming args start past the 8-byte return address
)

// Little-endian ASCII immediates for fast literal matching:
// "null", "true" and "alse" (tail of "false").
const (
    _IM_null = 0x6c6c756e // 'null'
    _IM_true = 0x65757274 // 'true'
    _IM_alse = 0x65736c61 // 'alse' ("false" minus the leading 'f')
)

// Bitmask over byte values marking the JSON whitespace characters.
const (
    _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
)

// Decoder flag bit (bit 3); checked against the flag word elsewhere —
// usage is not visible in this chunk.
const (
    _MODE_JSON = 1 << 3
)

// Label names of the shared error handlers emitted once per compiled
// decoder (see the *_error methods below).
const (
    _LB_error = "_error"
    _LB_im_error = "_im_error"
    _LB_eof_error = "_eof_error"
    _LB_type_error = "_type_error"
    _LB_field_error = "_field_error"
    _LB_range_error = "_range_error"
    _LB_stack_error = "_stack_error"
    _LB_base64_error = "_base64_error"
    _LB_unquote_error = "_unquote_error"
    _LB_parsing_error = "_parsing_error"
    _LB_parsing_error_v = "_parsing_error_v"
    _LB_mismatch_error = "_mismatch_error"
)

// Labels for character-mismatch errors; the numeric suffix is the byte
// offset (negative for "m" variants) relative to the current cursor at
// which the offending character sits (see parsing_error()).
const (
    _LB_char_0_error = "_char_0_error"
    _LB_char_1_error = "_char_1_error"
    _LB_char_2_error = "_char_2_error"
    _LB_char_3_error = "_char_3_error"
    _LB_char_4_error = "_char_4_error"
    _LB_char_m2_error = "_char_m2_error"
    _LB_char_m3_error = "_char_m3_error"
)

// Labels for the shared skip subroutines (see skip_one / skip_key_value).
const (
    _LB_skip_one = "_skip_one"
    _LB_skip_key_value = "_skip_key_value"
)
123
// Scratch general-purpose and SSE registers used while emitting code.
var (
    _AX = jit.Reg("AX")
    _CX = jit.Reg("CX")
    _DX = jit.Reg("DX")
    _DI = jit.Reg("DI")
    _SI = jit.Reg("SI")
    _BP = jit.Reg("BP")
    _SP = jit.Reg("SP")
    _R8 = jit.Reg("R8")
    _R9 = jit.Reg("R9")
    _X0 = jit.Reg("X0")
    _X1 = jit.Reg("X1")
)

// Registers pinned for the whole decoder (loaded in prologue()):
// _ST = decoder stack base (_ARG_sb), _IP = input pointer (_ARG_sp),
// _IL = input length (_ARG_sl), _IC = input cursor (_ARG_ic),
// _VP = pointer to the value being decoded (_ARG_vp).
var (
    _ST = jit.Reg("BX")
    _IP = jit.Reg("R12")
    _IL = jit.Reg("R13")
    _IC = jit.Reg("R14")
    _VP = jit.Reg("R15")
)

// Error-reporting registers: _ET holds the error itab/type, _EP the error
// payload. NOTE: _R10, _DF and _ET all alias R10, so they must never be
// live at the same time.
var (
    _R10 = jit.Reg("R10")
    _DF = jit.Reg("R10")
    _ET = jit.Reg("R10")
    _EP = jit.Reg("R11")
)

// Incoming argument slots (relative to _FP_base). _ARG_s aliases the
// (ptr,len) string header starting at _ARG_sp; it is addressed as a pair
// via LEAQ/MOVOU (see call_sf and parsing_error).
var (
    _ARG_s = _ARG_sp
    _ARG_sp = jit.Ptr(_SP, _FP_base)
    _ARG_sl = jit.Ptr(_SP, _FP_base + 8)
    _ARG_ic = jit.Ptr(_SP, _FP_base + 16)
    _ARG_vp = jit.Ptr(_SP, _FP_base + 24)
    _ARG_sb = jit.Ptr(_SP, _FP_base + 32)
    _ARG_fv = jit.Ptr(_SP, _FP_base + 40)
)

// Spill slots in the caller-visible area: _VAR_sv aliases the (ptr,len)
// pair _VAR_sv_p/_VAR_sv_n so it can be moved as one 16-byte unit (MOVOU).
var (
    _VAR_sv = _VAR_sv_p
    _VAR_sv_p = jit.Ptr(_SP, _FP_base + 48)
    _VAR_sv_n = jit.Ptr(_SP, _FP_base + 56)
    _VAR_vk = jit.Ptr(_SP, _FP_base + 64)
)

// Return value slots: cursor, error itab and error payload.
var (
    _RET_rc = jit.Ptr(_SP, _FP_base + 72)
    _RET_et = jit.Ptr(_SP, _FP_base + 80)
    _RET_ep = jit.Ptr(_SP, _FP_base + 88)
)

// _VAR_st aliases the start of the native parser state block below;
// _VAR_sr is the unquote "source remaining" slot (shares storage with
// _VAR_st_Vt — only one of the two uses is live at a time).
var (
    _VAR_st = _VAR_st_Vt
    _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves)
)


// Fields of the native parser state structure passed to the value
// parsers (see call_vf): value tag, double value, integer value, end
// position, digit buffer pointer and capacity.
var (
    _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0)
    _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8)
    _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16)
    _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24)
    _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32)
    _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40)
)

// Ad-hoc register spill slots — usage not visible in this chunk;
// presumably used by opcode implementations elsewhere in the file.
var (
    _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48)
    _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56)
    _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64)
    _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72)
    _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80)
)

// Scratch slots for the copy/escape string subroutines: source pointer,
// source length and the caller's return address (LR) held in R9.
var (
    _VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88)
    _VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96)
    _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104)
)

// Scratch slot — usage not visible in this chunk.
var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112)

// Deferred-mismatch state: recorded error type (_VAR_et), the cursor at
// the point of mismatch (_VAR_ic) and the resume address (_VAR_pc) used
// by the skip subroutines.
var (
    _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120)
    _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128)
    _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136)
)
212
// _Assembler compiles a decoder _Program into executable machine code.
// It embeds jit.BaseAssembler for the low-level Emit/Link/Sjmp machinery.
type _Assembler struct {
    jit.BaseAssembler
    p _Program // the instruction sequence to compile
    name string // decoder name, used to label the generated function in Load()
}
218
219 func newAssembler(p _Program) *_Assembler {
220 return new(_Assembler).Init(p)
221 }
222
223
224
// Load assembles the program and returns the generated code as a _Decoder
// function pointer (via ptodec). argPtrs/localPtrs describe the pointer
// maps for the GC — defined elsewhere in this file.
func (self *_Assembler) Load() _Decoder {
    return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs))
}
228
// Init stores the program and registers self.compile as the lazy
// compilation callback with the base assembler, returning self for
// chaining.
func (self *_Assembler) Init(p _Program) *_Assembler {
    self.p = p
    self.BaseAssembler.Init(self.compile)
    return self
}
234
// compile emits the whole decoder: prologue, the program body, epilogue,
// then every shared subroutine and error handler exactly once. The order
// matters only in that the body's short jumps must resolve to handlers
// emitted within the same function.
func (self *_Assembler) compile() {
    self.prologue()
    self.instrs()
    self.epilogue()
    self.copy_string()
    self.escape_string()
    self.escape_string_twice()
    self.skip_one()
    self.skip_key_value()
    self.mismatch_error()
    self.type_error()
    self.field_error()
    self.range_error()
    self.stack_error()
    self.base64_error()
    self.parsing_error()
}
252
253
254
// _OpFuncTab maps every opcode to the method that emits its machine code.
// Entries left nil are invalid opcodes and make instr() panic.
var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
    _OP_any : (*_Assembler)._asm_OP_any,
    _OP_dyn : (*_Assembler)._asm_OP_dyn,
    _OP_str : (*_Assembler)._asm_OP_str,
    _OP_bin : (*_Assembler)._asm_OP_bin,
    _OP_bool : (*_Assembler)._asm_OP_bool,
    _OP_num : (*_Assembler)._asm_OP_num,
    _OP_i8 : (*_Assembler)._asm_OP_i8,
    _OP_i16 : (*_Assembler)._asm_OP_i16,
    _OP_i32 : (*_Assembler)._asm_OP_i32,
    _OP_i64 : (*_Assembler)._asm_OP_i64,
    _OP_u8 : (*_Assembler)._asm_OP_u8,
    _OP_u16 : (*_Assembler)._asm_OP_u16,
    _OP_u32 : (*_Assembler)._asm_OP_u32,
    _OP_u64 : (*_Assembler)._asm_OP_u64,
    _OP_f32 : (*_Assembler)._asm_OP_f32,
    _OP_f64 : (*_Assembler)._asm_OP_f64,
    _OP_unquote : (*_Assembler)._asm_OP_unquote,
    _OP_nil_1 : (*_Assembler)._asm_OP_nil_1,
    _OP_nil_2 : (*_Assembler)._asm_OP_nil_2,
    _OP_nil_3 : (*_Assembler)._asm_OP_nil_3,
    _OP_deref : (*_Assembler)._asm_OP_deref,
    _OP_index : (*_Assembler)._asm_OP_index,
    _OP_is_null : (*_Assembler)._asm_OP_is_null,
    _OP_is_null_quote : (*_Assembler)._asm_OP_is_null_quote,
    _OP_map_init : (*_Assembler)._asm_OP_map_init,
    _OP_map_key_i8 : (*_Assembler)._asm_OP_map_key_i8,
    _OP_map_key_i16 : (*_Assembler)._asm_OP_map_key_i16,
    _OP_map_key_i32 : (*_Assembler)._asm_OP_map_key_i32,
    _OP_map_key_i64 : (*_Assembler)._asm_OP_map_key_i64,
    _OP_map_key_u8 : (*_Assembler)._asm_OP_map_key_u8,
    _OP_map_key_u16 : (*_Assembler)._asm_OP_map_key_u16,
    _OP_map_key_u32 : (*_Assembler)._asm_OP_map_key_u32,
    _OP_map_key_u64 : (*_Assembler)._asm_OP_map_key_u64,
    _OP_map_key_f32 : (*_Assembler)._asm_OP_map_key_f32,
    _OP_map_key_f64 : (*_Assembler)._asm_OP_map_key_f64,
    _OP_map_key_str : (*_Assembler)._asm_OP_map_key_str,
    _OP_map_key_utext : (*_Assembler)._asm_OP_map_key_utext,
    _OP_map_key_utext_p : (*_Assembler)._asm_OP_map_key_utext_p,
    _OP_array_skip : (*_Assembler)._asm_OP_array_skip,
    _OP_array_clear : (*_Assembler)._asm_OP_array_clear,
    _OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p,
    _OP_slice_init : (*_Assembler)._asm_OP_slice_init,
    _OP_slice_append : (*_Assembler)._asm_OP_slice_append,
    _OP_object_skip : (*_Assembler)._asm_OP_object_skip,
    _OP_object_next : (*_Assembler)._asm_OP_object_next,
    _OP_struct_field : (*_Assembler)._asm_OP_struct_field,
    _OP_unmarshal : (*_Assembler)._asm_OP_unmarshal,
    _OP_unmarshal_p : (*_Assembler)._asm_OP_unmarshal_p,
    _OP_unmarshal_text : (*_Assembler)._asm_OP_unmarshal_text,
    _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p,
    _OP_lspace : (*_Assembler)._asm_OP_lspace,
    _OP_match_char : (*_Assembler)._asm_OP_match_char,
    _OP_check_char : (*_Assembler)._asm_OP_check_char,
    _OP_load : (*_Assembler)._asm_OP_load,
    _OP_save : (*_Assembler)._asm_OP_save,
    _OP_drop : (*_Assembler)._asm_OP_drop,
    _OP_drop_2 : (*_Assembler)._asm_OP_drop_2,
    _OP_recurse : (*_Assembler)._asm_OP_recurse,
    _OP_goto : (*_Assembler)._asm_OP_goto,
    _OP_switch : (*_Assembler)._asm_OP_switch,
    _OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0,
    _OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err,
    _OP_go_skip : (*_Assembler)._asm_OP_go_skip,
    _OP_add : (*_Assembler)._asm_OP_add,
    _OP_check_empty : (*_Assembler)._asm_OP_check_empty,
}
322
323 func (self *_Assembler) instr(v *_Instr) {
324 if fn := _OpFuncTab[v.op()]; fn != nil {
325 fn(self, v)
326 } else {
327 panic(fmt.Sprintf("invalid opcode: %d", v.op()))
328 }
329 }
330
// instrs emits every instruction of the program, marking each one with its
// program counter so branch targets can be resolved. Note v is the loop's
// copy of the instruction; &v is only used within the iteration.
func (self *_Assembler) instrs() {
    for i, v := range self.p {
        self.Mark(i)
        self.instr(&v)
        self.debug_instr(i, &v)
    }
}
338
// epilogue emits the function exit: on normal fall-through it clears the
// error payload and, if a type mismatch was recorded in _VAR_et during
// decoding, diverts to the mismatch handler; _LB_error then stores the
// cursor / error pair into the return slots, restores BP and returns.
func (self *_Assembler) epilogue() {
    self.Mark(len(self.p)) // PC one past the last instruction, so "goto end" branches land here
    self.Emit("XORL", _EP, _EP)
    self.Emit("MOVQ", _VAR_et, _ET)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ", _LB_mismatch_error)
    self.Link(_LB_error)
    self.Emit("MOVQ", _IC, _RET_rc)
    self.Emit("MOVQ", _ET, _RET_et)
    self.Emit("MOVQ", _EP, _RET_ep)
    self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP)
    self.Emit("ADDQ", jit.Imm(_FP_size), _SP)
    self.Emit("RET")
}
353
// prologue emits the function entry: allocate the frame, save/establish BP,
// load the pinned registers from the incoming arguments, and initialize the
// digit buffer (_VAR_st_Db/_VAR_st_Dc) and the deferred-mismatch slot
// (_VAR_et = 0).
func (self *_Assembler) prologue() {
    self.Emit("SUBQ", jit.Imm(_FP_size), _SP)
    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs))
    self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP)
    self.Emit("MOVQ", _ARG_sp, _IP)
    self.Emit("MOVQ", _ARG_sl, _IL)
    self.Emit("MOVQ", _ARG_ic, _IC)
    self.Emit("MOVQ", _ARG_vp, _VP)
    self.Emit("MOVQ", _ARG_sb, _ST)

    // digit buffer lives inside the decoder stack at _DbufOffset
    self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc)
    self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX)
    self.Emit("MOVQ", _AX, _VAR_st_Db)
    self.Emit("XORL", _AX, _AX)
    self.Emit("MOVQ", _AX, _VAR_et)
}
370
371
372
// _REG_go lists the pinned registers that must be preserved across calls
// into Go-ABI functions (see call_go).
var _REG_go = []obj.Addr {
    _ST,
    _VP,
    _IP,
    _IL,
    _IC,
}
380
381 func (self *_Assembler) save(r ...obj.Addr) {
382 for i, v := range r {
383 if i > _FP_saves / 8 - 1 {
384 panic("too many registers to save")
385 } else {
386 self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8))
387 }
388 }
389 }
390
391 func (self *_Assembler) load(r ...obj.Addr) {
392 for i, v := range r {
393 if i > _FP_saves / 8 - 1 {
394 panic("too many registers to load")
395 } else {
396 self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v)
397 }
398 }
399 }
400
// call emits an indirect call to fn through AX (clobbers AX).
func (self *_Assembler) call(fn obj.Addr) {
    self.Emit("MOVQ", fn, _AX)
    self.Rjmp("CALL", _AX)
}
405
// call_go calls a Go-ABI function, preserving the decoder's pinned
// registers (_REG_go) across the call.
func (self *_Assembler) call_go(fn obj.Addr) {
    self.save(_REG_go...)
    self.call(fn)
    self.load(_REG_go...)
}
411
// call_sf calls a native skip-family function with the C-like register
// convention: DI = &input string header, SI = &cursor, DX = &FSM state on
// the decoder stack, CX = flags. The cursor is spilled to memory before and
// reloaded after, since the callee advances it through the pointer.
func (self *_Assembler) call_sf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI)
    self.Emit("MOVQ", _IC, _ARG_ic)
    self.Emit("LEAQ", _ARG_ic, _SI)
    self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX)
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.call(fn)
    self.Emit("MOVQ", _ARG_ic, _IC)
}
421
// call_vf calls a native value-parser function: DI = &input string header,
// SI = &cursor, DX = &parser state block (_VAR_st). Cursor is round-tripped
// through memory as in call_sf.
func (self *_Assembler) call_vf(fn obj.Addr) {
    self.Emit("LEAQ", _ARG_s, _DI)
    self.Emit("MOVQ", _IC, _ARG_ic)
    self.Emit("LEAQ", _ARG_ic, _SI)
    self.Emit("LEAQ", _VAR_st, _DX)
    self.call(fn)
    self.Emit("MOVQ", _ARG_ic, _IC)
}
430
431
432
// Addresses of the Go helper functions used by the error handlers.
var (
    _F_convT64 = jit.Func(convT64)
    _F_error_wrap = jit.Func(error_wrap)
    _F_error_type = jit.Func(error_type)
    _F_error_field = jit.Func(error_field)
    _F_error_value = jit.Func(error_value)
    _F_error_mismatch = jit.Func(error_mismatch)
)

// Pre-resolved itab/type pairs for the primitive numeric types, used when
// reporting range errors.
var (
    _I_int8 , _T_int8 = rtype(reflect.TypeOf(int8(0)))
    _I_int16 , _T_int16 = rtype(reflect.TypeOf(int16(0)))
    _I_int32 , _T_int32 = rtype(reflect.TypeOf(int32(0)))
    _I_uint8 , _T_uint8 = rtype(reflect.TypeOf(uint8(0)))
    _I_uint16 , _T_uint16 = rtype(reflect.TypeOf(uint16(0)))
    _I_uint32 , _T_uint32 = rtype(reflect.TypeOf(uint32(0)))
    _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0)))
)

// The error interface type and the itab for base64.CorruptInputError,
// used by base64_error().
var (
    _T_error = rt.UnpackType(errorType)
    _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError)
)

// Canonical error values/itabs used by stack_error() and mismatch handling.
var (
    _V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
    _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
    _I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
)
462
// type_error emits the handler that converts the type in _ET into a
// proper error via error_type, then exits through _LB_error.
func (self *_Assembler) type_error() {
    self.Link(_LB_type_error)
    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 0)) // arg0: the offending type
    self.call_go(_F_error_type)
    self.Emit("MOVQ", jit.Ptr(_SP, 8), _ET) // ret0: error itab
    self.Emit("MOVQ", jit.Ptr(_SP, 16), _EP) // ret1: error data
    self.Sjmp("JMP" , _LB_error)
}
471
472
// mismatch_error emits the deferred type-mismatch handler. If _VAR_et
// already holds the MismatchTypeError itab (stored by decode_dynamic),
// the recorded (_VAR_et, _VAR_ic) pair is returned as-is; otherwise
// _VAR_et holds the expected type (stored by _asm_OP_dismatch_err) and a
// new error is built from the input, position and type via error_mismatch.
func (self *_Assembler) mismatch_error() {
    self.Link(_LB_mismatch_error)
    self.Emit("MOVQ", _VAR_et, _ET)
    self.Emit("MOVQ", _VAR_ic, _EP)
    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX)
    self.Emit("CMPQ", _ET, _AX)
    self.Sjmp("JE" , _LB_error) // already a full error: return it directly
    self.Emit("MOVQ", _ARG_sp, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // arg0/arg1: input string
    self.Emit("MOVQ", _ARG_sl, _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _VAR_ic, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // arg2: position
    self.Emit("MOVQ", _VAR_et, _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // arg3: expected type
    self.call_go(_F_error_mismatch)
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)
    self.Sjmp("JMP" , _LB_error)
}
493
// _asm_OP_dismatch_err records a type mismatch without aborting: it saves
// the current cursor and the expected type for later reporting by
// mismatch_error() at function exit.
func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) {
    self.Emit("MOVQ", _IC, _VAR_ic)
    self.Emit("MOVQ", jit.Type(p.vt()), _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
}
499
// _asm_OP_go_skip jumps to the shared skip_one subroutine, first loading
// R9 with the branch-target address (raw bytes 4c 8d 0d xx = LEA R9,
// [RIP+disp32], displacement patched by Xref to instruction p.vi()) so
// skip_one can "return" there via an indirect jump.
func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
    self.Byte(0x4c, 0x8d, 0x0d) // LEA R9, [RIP + disp32]
    self.Xref(p.vi(), 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)
}
506
// skip_one emits the shared subroutine that skips a single JSON value
// starting at the recorded cursor (_VAR_ic), then resumes at the caller's
// continuation address stored in _VAR_pc. A negative return from the
// native skipper is routed to the parsing-error handler.
func (self *_Assembler) skip_one() {
    self.Link(_LB_skip_one)
    self.Emit("MOVQ", _VAR_ic, _IC)
    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)
    self.Emit("MOVQ" , _VAR_pc, _R9)
    self.Rjmp("JMP" , _R9)
}
516
517
// skip_key_value emits the shared subroutine that skips an entire object
// member: the key, surrounding whitespace, the ':' separator, then the
// value — resuming at the continuation address in _VAR_pc.
func (self *_Assembler) skip_key_value() {
    self.Link(_LB_skip_key_value)

    // skip the key
    self.Emit("MOVQ", _VAR_ic, _IC)
    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)

    // expect ':' after optional whitespace
    self.lspace("_global_1")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':'))
    self.Sjmp("JNE" , _LB_parsing_error_v)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.lspace("_global_2")

    // skip the value
    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_parsing_error_v)

    // resume at the caller's continuation
    self.Emit("MOVQ" , _VAR_pc, _R9)
    self.Rjmp("JMP" , _R9)
}
539
// field_error emits the handler that builds an error from the field name
// held in the 16-byte _VAR_sv string header, via error_field.
func (self *_Assembler) field_error() {
    self.Link(_LB_field_error)
    self.Emit("MOVOU", _VAR_sv, _X0) // (ptr,len) pair moved as one 16-byte unit
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))
    self.call_go(_F_error_field)
    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _EP)
    self.Sjmp("JMP" , _LB_error)
}
549
// range_error emits the handler for out-of-range numbers: it re-slices the
// offending literal (from _VAR_st_Ep to the cursor) and passes it together
// with the expected type/itab already in _ET/_EP to error_value.
func (self *_Assembler) range_error() {
    self.Link(_LB_range_error)
    self.slice_from(_VAR_st_Ep, 0) // DI/SI = pointer/length of the literal
    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 0))
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _ET, jit.Ptr(_SP, 16))
    self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24))
    self.call_go(_F_error_value)
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET)
    self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP)
    self.Sjmp("JMP" , _LB_error)
}
562
// stack_error emits the handler returning the canonical stack-overflow
// error (json.UnsupportedValueError) without any allocation.
func (self *_Assembler) stack_error() {
    self.Link(_LB_stack_error)
    self.Emit("MOVQ", _V_stackOverflow, _EP)
    self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET)
    self.Sjmp("JMP" , _LB_error)
}
569
// base64_error emits the handler for base64 decode failures: AX holds a
// negative encoded offset; -(AX)-1 is boxed via convT64 and wrapped as a
// base64.CorruptInputError.
func (self *_Assembler) base64_error() {
    self.Link(_LB_base64_error)
    self.Emit("NEGQ", _AX)
    self.Emit("SUBQ", jit.Imm(1), _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))
    self.call_go(_F_convT64)
    self.Emit("MOVQ", jit.Ptr(_SP, 8), _EP)
    self.Emit("MOVQ", _I_base64_CorruptInputError, _ET)
    self.Sjmp("JMP" , _LB_error)
}
580
// parsing_error emits the chained parsing-error handlers. The _LB_char_N
// labels fall through each other, each adding 1 to the cursor, so jumping
// to _LB_char_K reports the character at offset K from the cursor; the
// _m2/_m3 variants subtract to point before the cursor. _LB_im_error
// compares the expected literal bytes in CX against the input to find the
// first mismatching character. All paths converge on _LB_parsing_error,
// which calls error_wrap with (input, position, error code).
func (self *_Assembler) parsing_error() {
    self.Link(_LB_eof_error)
    self.Emit("MOVQ" , _IL, _IC) // position = end of input
    self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP)
    self.Sjmp("JMP" , _LB_parsing_error)
    self.Link(_LB_unquote_error)
    self.Emit("SUBQ" , _VAR_sr, _SI) // recover the absolute error position from the unquote source pointer
    self.Emit("SUBQ" , _SI, _IC)
    self.Link(_LB_parsing_error_v)
    self.Emit("MOVQ" , _AX, _EP) // AX holds the negated native error code
    self.Emit("NEGQ" , _EP)
    self.Sjmp("JMP" , _LB_parsing_error)
    self.Link(_LB_char_m3_error)
    self.Emit("SUBQ" , jit.Imm(1), _IC) // -3 = -1 then fall into the -2 case
    self.Link(_LB_char_m2_error)
    self.Emit("SUBQ" , jit.Imm(2), _IC)
    self.Sjmp("JMP" , _LB_char_0_error)
    self.Link(_LB_im_error)
    // CX holds the expected literal bytes (little-endian); scan for the
    // first byte that differs to pinpoint the offending offset.
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0))
    self.Sjmp("JNE" , _LB_char_0_error)
    self.Emit("SHRL" , jit.Imm(8), _CX)
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1))
    self.Sjmp("JNE" , _LB_char_1_error)
    self.Emit("SHRL" , jit.Imm(8), _CX)
    self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2))
    self.Sjmp("JNE" , _LB_char_2_error)
    self.Sjmp("JMP" , _LB_char_3_error)
    self.Link(_LB_char_4_error)
    self.Emit("ADDQ" , jit.Imm(1), _IC) // each label adds 1 and falls through
    self.Link(_LB_char_3_error)
    self.Emit("ADDQ" , jit.Imm(1), _IC)
    self.Link(_LB_char_2_error)
    self.Emit("ADDQ" , jit.Imm(1), _IC)
    self.Link(_LB_char_1_error)
    self.Emit("ADDQ" , jit.Imm(1), _IC)
    self.Link(_LB_char_0_error)
    self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP)
    self.Link(_LB_parsing_error)
    self.Emit("MOVOU", _ARG_s, _X0) // args: input string, position, error code
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))
    self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16))
    self.Emit("MOVQ" , _EP, jit.Ptr(_SP, 24))
    self.call_go(_F_error_wrap)
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)
    self.Sjmp("JMP" , _LB_error)
}
628
629
630
// Runtime helpers for raw allocation: the byte type descriptor and
// runtime.mallocgc.
var (
    _T_byte = jit.Type(byteType)
    _F_mallocgc = jit.Func(mallocgc)
)
635
// malloc emits a call to mallocgc(nb, byteType, false /* no zeroing of
// pointers needed */) and stores the returned pointer into ret.
func (self *_Assembler) malloc(nb obj.Addr, ret obj.Addr) {
    self.Emit("XORL", _AX, _AX) // needzero = false
    self.Emit("MOVQ", _T_byte, _CX)
    self.Emit("MOVQ", nb, jit.Ptr(_SP, 0))
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
    self.call_go(_F_mallocgc)
    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret)
}
645
// valloc emits a call to mallocgc(sizeof(vt), vt, true) — a zeroed, typed
// allocation of a single value of vt — storing the pointer into ret.
func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) {
    self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVQ", jit.Type(vt), _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))
    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_SP, 16)) // needzero = true
    self.call_go(_F_mallocgc)
    self.Emit("MOVQ", jit.Ptr(_SP, 24), ret)
}
655
// vfollow dereferences the pointer at *VP, allocating a fresh zeroed value
// of vt (with a write barrier via WritePtrAX) when the pointer is nil, and
// makes VP point at the pointee.
func (self *_Assembler) vfollow(vt reflect.Type) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNZ" , "_end_{n}")
    self.valloc(vt, _AX)
    self.WritePtrAX(1, jit.Ptr(_VP, 0), false)
    self.Link("_end_{n}")
    self.Emit("MOVQ" , _AX, _VP)
}
665
666
667
// Entry points of the native value parsers (called via call_vf).
var (
    _F_vstring = jit.Imm(int64(native.S_vstring))
    _F_vnumber = jit.Imm(int64(native.S_vnumber))
    _F_vsigned = jit.Imm(int64(native.S_vsigned))
    _F_vunsigned = jit.Imm(int64(native.S_vunsigned))
)
674
// check_err tests the tag produced by a native value parser (_VAR_st_Vt;
// negative means error). With vt == nil any error aborts via the parsing
// error handler. With vt != nil the error is treated as a recoverable type
// mismatch: the expected type and the value's start position (held in BP
// by the parse_* callers) are recorded, and control transfers to the skip
// subroutine — skip_key_value resuming at instruction pin2 when pin2 != -1
// (BP is decremented by 1 first, presumably to re-include the key's
// opening quote — confirm against callers), otherwise skip_one resuming at
// label pin.
func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ" , _VAR_st_Vt, _AX)
    self.Emit("TESTQ", _AX, _AX)

    if vt != nil {
        self.Sjmp("JNS" , "_check_err_{n}")
        self.Emit("MOVQ", jit.Type(vt), _ET)
        self.Emit("MOVQ", _ET, _VAR_et)
        if pin2 != -1 {
            self.Emit("SUBQ", jit.Imm(1), _BP)
            self.Emit("MOVQ", _BP, _VAR_ic)
            self.Byte(0x4c , 0x8d, 0x0d) // LEA R9, [RIP + disp32]: continuation address
            self.Xref(pin2, 4)
            self.Emit("MOVQ", _R9, _VAR_pc)
            self.Sjmp("JMP" , _LB_skip_key_value)
        } else {
            self.Emit("MOVQ", _BP, _VAR_ic)
            self.Byte(0x4c , 0x8d, 0x0d) // LEA R9, [RIP + disp32]: continuation address
            self.Sref(pin, 4)
            self.Emit("MOVQ", _R9, _VAR_pc)
            self.Sjmp("JMP" , _LB_skip_one)
        }
        self.Link("_check_err_{n}")
    } else {
        self.Sjmp("JS" , _LB_parsing_error_v)
    }
}
702
703 func (self *_Assembler) check_eof(d int64) {
704 if d == 1 {
705 self.Emit("CMPQ", _IC, _IL)
706 self.Sjmp("JAE" , _LB_eof_error)
707 } else {
708 self.Emit("LEAQ", jit.Ptr(_IC, d), _AX)
709 self.Emit("CMPQ", _AX, _IL)
710 self.Sjmp("JA" , _LB_eof_error)
711 }
712 }
713
// parse_string calls the native string parser; any error aborts
// immediately (no mismatch recovery — vt is nil).
func (self *_Assembler) parse_string() {
    self.Emit("MOVQ", _ARG_fv, _CX)
    self.call_vf(_F_vstring)
    self.check_err(nil, "", -1)
}
719
// parse_number calls the native number parser, first saving the value's
// start position in BP for check_err's mismatch recovery.
func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vnumber)
    self.check_err(vt, pin, pin2)
}
725
// parse_signed calls the native signed-integer parser; see parse_number
// for the BP / check_err protocol.
func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vsigned)
    self.check_err(vt, pin, pin2)
}
731
// parse_unsigned calls the native unsigned-integer parser; see
// parse_number for the BP / check_err protocol.
func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) {
    self.Emit("MOVQ", _IC, _BP)
    self.call_vf(_F_vunsigned)
    self.check_err(vt, pin, pin2)
}
737
738
// copy_string emits the shared subroutine that copies the DI/SI
// (pointer/length) slice into freshly allocated memory, returning the new
// pointer in DI with SI unchanged, then jumps back through R9 (caller's
// continuation, saved in _VAR_bs_LR across the calls).
func (self *_Assembler) copy_string() {
    self.Link("_copy_string")
    self.Emit("MOVQ", _DI, _VAR_bs_p)
    self.Emit("MOVQ", _SI, _VAR_bs_n)
    self.Emit("MOVQ", _R9, _VAR_bs_LR)
    self.malloc(_SI, _AX)
    self.Emit("MOVQ", _AX, _VAR_sv_p)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // memmove(dst, src, n)
    self.Emit("MOVQ", _VAR_bs_p, _DI)
    self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 16))
    self.call_go(_F_memmove)
    self.Emit("MOVQ", _VAR_sv_p, _DI)
    self.Emit("MOVQ", _VAR_bs_n, _SI)
    self.Emit("MOVQ", _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}
757
758
// escape_string emits the shared subroutine that unquotes an escaped JSON
// string: it allocates a destination buffer, calls the native unquote with
// the unicode-replacement flag derived from _ARG_fv, and returns the new
// pointer/length in DI/SI, resuming through R9. On unquote failure the
// pre-restored SI (source length + 1) lets the error handler compute the
// failing position.
func (self *_Assembler) escape_string() {
    self.Link("_escape_string")
    self.Emit("MOVQ" , _DI, _VAR_bs_p)
    self.Emit("MOVQ" , _SI, _VAR_bs_n)
    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
    self.malloc(_SI, _DX) // unquoted output is never longer than the input
    self.Emit("MOVQ" , _DX, _VAR_sv_p)
    self.Emit("MOVQ" , _VAR_bs_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)
    self.Emit("LEAQ" , _VAR_sr, _CX) // &ep, receives the error position
    self.Emit("XORL" , _R8, _R8)
    self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // flags: replace invalid unicode unless disabled
    self.Emit("SETCC", _R8)
    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8)
    self.call(_F_unquote)
    self.Emit("MOVQ" , _VAR_bs_n, _SI) // restore source length for the error path
    self.Emit("ADDQ" , jit.Imm(1), _SI)
    self.Emit("TESTQ", _AX, _AX) // AX: new length, or negative error code
    self.Sjmp("JS" , _LB_unquote_error)
    self.Emit("MOVQ" , _AX, _SI)
    self.Emit("MOVQ" , _VAR_sv_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}
783
// escape_string_twice is the doubly-quoted variant of escape_string: the
// native unquote is invoked with F_DOUBLE_UNQUOTE (string values that are
// themselves JSON-encoded strings), combined with the unicode-replacement
// flag from _ARG_fv. The error path restores SI = source length + 3 to
// account for the extra quoting.
func (self *_Assembler) escape_string_twice() {
    self.Link("_escape_string_twice")
    self.Emit("MOVQ" , _DI, _VAR_bs_p)
    self.Emit("MOVQ" , _SI, _VAR_bs_n)
    self.Emit("MOVQ" , _R9, _VAR_bs_LR)
    self.malloc(_SI, _DX)
    self.Emit("MOVQ" , _DX, _VAR_sv_p)
    self.Emit("MOVQ" , _VAR_bs_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_n, _SI)
    self.Emit("LEAQ" , _VAR_sr, _CX) // &ep, receives the error position
    self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8)
    self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv)
    self.Emit("XORL" , _AX, _AX)
    self.Emit("SETCC", _AX)
    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX)
    self.Emit("ORQ" , _AX, _R8)
    self.call(_F_unquote)
    self.Emit("MOVQ" , _VAR_bs_n, _SI) // restore source length (+3) for the error path
    self.Emit("ADDQ" , jit.Imm(3), _SI)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_unquote_error)
    self.Emit("MOVQ" , _AX, _SI)
    self.Emit("MOVQ" , _VAR_sv_p, _DI)
    self.Emit("MOVQ" , _VAR_bs_LR, _R9)
    self.Rjmp("JMP", _R9)
}
810
811
812
// Addresses of the float32 range limits, loaded as immediates so the
// generated code can UCOMISS against them (see range_single).
var (
    _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32))))
    _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32))))
)

// Backing storage for the limits; initialized in init() below.
var (
    _Vp_max_f32 = new(float32)
    _Vp_min_f32 = new(float32)
)
822
// init fills the float32 range-check limits: ±math.MaxFloat32.
func init() {
    *_Vp_max_f32 = math.MaxFloat32
    *_Vp_min_f32 = -math.MaxFloat32
}
827
// range_single converts the parsed double (_VAR_st_Dv) to float32 in X0
// and branches to the range handler when it exceeds ±MaxFloat32, with the
// float32 itab/type preloaded into _ET/_EP for the error message.
func (self *_Assembler) range_single() {
    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)
    self.Emit("MOVQ" , _V_max_f32, _AX)
    self.Emit("MOVQ" , jit.Gitab(_I_float32), _ET)
    self.Emit("MOVQ" , jit.Gtype(_T_float32), _EP)
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)
    self.Sjmp("JA" , _LB_range_error)
    self.Emit("MOVQ" , _V_min_f32, _AX)
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)
    self.Sjmp("JB" , _LB_range_error)
}
839
// range_signed branches to the range handler unless the parsed integer
// (_VAR_st_Iv) lies within [a, b]; i/t preload the error itab/type.
func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
    self.Emit("MOVQ", _VAR_st_Iv, _AX)
    self.Emit("MOVQ", jit.Gitab(i), _ET)
    self.Emit("MOVQ", jit.Gtype(t), _EP)
    self.Emit("CMPQ", _AX, jit.Imm(a))
    self.Sjmp("JL" , _LB_range_error)
    self.Emit("CMPQ", _AX, jit.Imm(b))
    self.Sjmp("JG" , _LB_range_error)
}
849
// range_unsigned branches to the range handler when the parsed integer is
// negative or exceeds v (unsigned compare); i/t preload the error
// itab/type.
func (self *_Assembler) range_unsigned(i *rt.GoItab, t *rt.GoType, v uint64) {
    self.Emit("MOVQ" , _VAR_st_Iv, _AX)
    self.Emit("MOVQ" , jit.Gitab(i), _ET)
    self.Emit("MOVQ" , jit.Gtype(t), _EP)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS" , _LB_range_error)
    self.Emit("CMPQ" , _AX, jit.Imm(int64(v)))
    self.Sjmp("JA" , _LB_range_error)
}
859
860
861
// Entry point of the native unquote routine (see escape_string*).
var (
    _F_unquote = jit.Imm(int64(native.S_unquote))
)
865
// slice_from loads the start position from memory slot p and delegates to
// slice_from_r to build the DI/SI (pointer, length) pair.
func (self *_Assembler) slice_from(p obj.Addr, d int64) {
    self.Emit("MOVQ", p, _SI)
    self.slice_from_r(_SI, d)
}
870
// slice_from_r builds DI = IP + p (slice start) and SI = IC - p + d
// (length, adjusted by d) from the start position in register p;
// p is clobbered (negated) in the process.
func (self *_Assembler) slice_from_r(p obj.Addr, d int64) {
    self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI)
    self.Emit("NEGQ", p)
    self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI)
}
876
// unquote_once stores the string just parsed by vstring into the (p, n)
// pointer/length slots. If the parser found escapes (_VAR_st_Ep != -1) the
// raw bytes are routed through the escape_string subroutine; otherwise,
// when copy is set and the copy-string flag is on, through copy_string so
// the result does not alias the input buffer. stack selects a plain store
// (value lives on the stack) versus a write-barriered store.
func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) {
    self.slice_from(_VAR_st_Iv, -1) // DI/SI = raw string bytes (minus closing quote)
    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1))
    self.Sjmp("JE" , "_noescape_{n}")
    self.Byte(0x4c, 0x8d, 0x0d) // LEA R9, [RIP + disp32]: continuation for escape_string
    self.Sref("_unquote_once_write_{n}", 4)
    self.Sjmp("JMP" , "_escape_string")
    self.Link("_noescape_{n}")
    if copy {
        self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
        self.Sjmp("JNC", "_unquote_once_write_{n}")
        self.Byte(0x4c, 0x8d, 0x0d) // LEA R9, [RIP + disp32]: continuation for copy_string
        self.Sref("_unquote_once_write_{n}", 4)
        self.Sjmp("JMP", "_copy_string")
    }
    self.Link("_unquote_once_write_{n}")
    self.Emit("MOVQ" , _SI, n)
    if stack {
        self.Emit("MOVQ", _DI, p)
    } else {
        self.WriteRecNotAX(10, _DI, p, false, false)
    }
}
900
// unquote_twice handles doubly-quoted strings (a JSON string literal that
// itself encodes a string): it verifies the trailing `\"` sequence, slices
// off both quoting layers, and routes through escape_string_twice when
// inner escapes are present (detected by comparing the escape position
// against the value start), or optionally through copy_string. Results are
// stored into (p, n) as in unquote_once.
func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) {
    self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1))
    self.Sjmp("JE" , _LB_eof_error)
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\')) // expect closing `\"` pair
    self.Sjmp("JNE" , _LB_char_m3_error)
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"'))
    self.Sjmp("JNE" , _LB_char_m2_error)
    self.slice_from(_VAR_st_Iv, -3) // strip both quoting layers
    self.Emit("MOVQ" , _SI, _AX)
    self.Emit("ADDQ" , _VAR_st_Iv, _AX)
    self.Emit("CMPQ" , _VAR_st_Ep, _AX) // escapes only at the very end → nothing inner to unquote
    self.Sjmp("JE" , "_noescape_{n}")
    self.Byte(0x4c, 0x8d, 0x0d) // LEA R9, [RIP + disp32]: continuation for escape_string_twice
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP" , "_escape_string_twice")
    self.Link("_noescape_{n}")
    self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv)
    self.Sjmp("JNC", "_unquote_twice_write_{n}")
    self.Byte(0x4c, 0x8d, 0x0d) // LEA R9, [RIP + disp32]: continuation for copy_string
    self.Sref("_unquote_twice_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_unquote_twice_write_{n}")
    self.Emit("MOVQ" , _SI, n)
    if stack {
        self.Emit("MOVQ", _DI, p)
    } else {
        self.WriteRecNotAX(12, _DI, p, false, false)
    }
}
930
931
932
// Runtime memory-clearing helpers: pointer-aware and pointer-free flavors.
var (
    _F_memclrHasPointers = jit.Func(memclrHasPointers)
    _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers)
)
937
938 func (self *_Assembler) mem_clear_fn(ptrfree bool) {
939 if !ptrfree {
940 self.call_go(_F_memclrHasPointers)
941 } else {
942 self.call_go(_F_memclrNoHeapPointers)
943 }
944 }
945
// mem_clear_rem clears the remainder of the container being decoded: the
// byte count is size plus the distance from VP to the end pointer kept on
// the decoder stack (loaded via a double indirection through _ST — the
// exact stack layout is defined elsewhere; confirm against the _OP_save /
// _OP_load implementations).
func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) {
    self.Emit("MOVQ", jit.Imm(size), _CX)
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX)
    self.Emit("SUBQ", _VP, _AX)
    self.Emit("ADDQ", _AX, _CX)
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0)) // memclr(ptr, n)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8))
    self.mem_clear_fn(ptrfree)
}
956
957
958
// Runtime map-assignment entry points (generic and specialized fast paths).
var (
    _F_mapassign = jit.Func(mapassign)
    _F_mapassign_fast32 = jit.Func(mapassign_fast32)
    _F_mapassign_faststr = jit.Func(mapassign_faststr)
    _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr)
)

// Unmarshaler trampolines; resolved in init() because jit.Func needs the
// concrete function values.
var (
    _F_decodeJsonUnmarshaler obj.Addr
    _F_decodeTextUnmarshaler obj.Addr
)
970
// init resolves the unmarshaler trampoline addresses at startup.
func init() {
    _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
    _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
}
975
// mapaccess_ptr follows the element pointer for maps whose values are
// stored indirectly (large elements), leaving VP pointing at the element.
func (self *_Assembler) mapaccess_ptr(t reflect.Type) {
    if rt.MapType(rt.UnpackType(t)).IndirectElem() {
        self.vfollow(t.Elem())
    }
}
981
// mapassign_std assigns via the generic runtime.mapassign, passing the
// address of the key held in slot v.
func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) {
    self.Emit("LEAQ", v, _AX)
    self.mapassign_call(t, _F_mapassign)
}
986
// mapassign_str_fast assigns a string key via mapassign_faststr, which
// takes the key by value (pointer p, length n) instead of by address; the
// returned element pointer becomes the new VP.
func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // args: maptype, map, key ptr, key len
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", p, jit.Ptr(_SP, 16))
    self.Emit("MOVQ", n, jit.Ptr(_SP, 24))
    self.call_go(_F_mapassign_faststr)
    self.Emit("MOVQ", jit.Ptr(_SP, 32), _VP) // ret: element pointer
    self.mapaccess_ptr(t)
}
997
// mapassign_call performs the common assign sequence: fn(maptype, map,
// key-in-AX) with the returned element pointer stored into VP.
func (self *_Assembler) mapassign_call(t reflect.Type, fn obj.Addr) {
    self.Emit("MOVQ", jit.Type(t), _SI)
    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0))
    self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // AX: key (pointer or value, per fn's ABI)
    self.call_go(fn)
    self.Emit("MOVQ", jit.Ptr(_SP, 24), _VP)
}
1006
// mapassign_fastx is mapassign_call plus the indirect-element follow-up.
func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) {
    self.mapassign_call(t, fn)
    self.mapaccess_ptr(t)
}
1011
// mapassign_utext assigns a map entry whose key implements
// encoding.TextUnmarshaler: it allocates a zeroed key value, unmarshals the
// text in _VAR_sv into it via decodeTextUnmarshaler, then inserts it.
// Pointer keys (pv) use the fast 64-bit-pointer assign; when addressable,
// the unmarshaler is invoked on *K instead and the value is inserted by
// address through the generic mapassign.
func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
    pv := false
    vk := t.Key()
    tk := t.Key()

    // deref pointer keys: the unmarshaler target is the pointee
    if vk.Kind() == reflect.Ptr {
        pv = true
        vk = vk.Elem()
    }

    // addressable keys are unmarshaled through *K
    if addressable {
        pv = false
        tk = reflect.PtrTo(tk)
    }

    // allocate the key value
    self.valloc(vk, _DI)

    // decodeTextUnmarshaler(tk, &key, text)
    self.Emit("MOVQ" , _DI, _VAR_vk)
    self.Emit("MOVQ" , jit.Type(tk), _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVQ" , _DI, jit.Ptr(_SP, 8))
    self.Emit("MOVOU", _VAR_sv, _X0)
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16))
    self.call_go(_F_decodeTextUnmarshaler)
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)
    self.Emit("MOVQ" , _VAR_vk, _AX)

    // insert the key
    if !pv {
        self.mapassign_call(t, _F_mapassign)
    } else {
        self.mapassign_fastx(t, _F_mapassign_fast64ptr)
    }
}
1052
1053
1054
// Entry points of the native skip routines (called via call_sf).
var (
    _F_skip_one = jit.Imm(int64(native.S_skip_one))
    _F_skip_number = jit.Imm(int64(native.S_skip_number))
)
1059
// unmarshal_json skips over the next JSON value to obtain its raw byte
// range, stores it in _VAR_sv, and feeds it to the type's UnmarshalJSON
// through the shared unmarshal_func plumbing.
func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX) // AX: value start position, or negative error
    self.Sjmp("JS" , _LB_parsing_error_v)
    self.slice_from_r(_AX, 0) // DI/SI = raw value bytes
    self.Emit("MOVQ" , _DI, _VAR_sv_p)
    self.Emit("MOVQ" , _SI, _VAR_sv_n)
    self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref)
}
1069
// unmarshal_text parses and unquotes the next string into _VAR_sv, then
// feeds it to the type's UnmarshalText via unmarshal_func.
func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
    self.parse_string()
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)
    self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref)
}
1075
// unmarshal_func emits the call fn(type, receiver, bytes-in-_VAR_sv) for a
// json/text unmarshaler. When deref is set and t is a pointer type, a nil
// *VP is first replaced by a fresh zeroed pointee (with write barrier) and
// the pointee becomes the receiver; otherwise VP itself is the receiver.
// A non-nil returned error aborts through _LB_error.
func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) {
    pt := t
    vk := t.Kind()

    // ensure a non-nil receiver when dereferencing a pointer field
    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ" , _VP, _AX)
        self.Emit("MOVQ" , jit.Ptr(_AX, 0), _AX)
        self.Emit("TESTQ", _AX, _AX)
        self.Sjmp("JNZ" , "_deref_{n}")
        self.valloc(t.Elem(), _AX)
        self.WritePtrAX(3, jit.Ptr(_VP, 0), false)
        self.Link("_deref_{n}")
    }

    // arg0: the receiver's type
    self.Emit("MOVQ", jit.Type(pt), _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 0))

    // arg1: the receiver pointer
    if deref && vk == reflect.Ptr {
        self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))
    } else {
        self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8))
    }

    // arg2/arg3: the input bytes (moved as a 16-byte pair)
    self.Emit("MOVOU", _VAR_sv, _X0)
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16))
    self.call_go(fn)
    self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP)
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JNZ" , _LB_error)
}
1111
1112
1113
// Address of decodeTypedPointer, resolved in init() below (jit.Func cannot
// be evaluated until the function symbol is available).
var (
    _F_decodeTypedPointer obj.Addr
)
1117
func init() {
    _F_decodeTypedPointer = jit.Func(decodeTypedPointer)
}
1121
// decode_dynamic emits a call to decodeTypedPointer for values whose concrete
// type is only known at run time; arguments are laid out on the stack as
// (src ptr+len, ic, vt, vp, st, fv). A *json.MismatchTypeError is tolerated:
// it is recorded in _VAR_ic/_VAR_et and decoding continues; any other error
// aborts via _LB_error.
func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
    self.Emit("MOVQ" , _ARG_fv, _CX)
    self.Emit("MOVOU", _ARG_sp, _X0)
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0))        // source string (ptr, len)
    self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16))       // current cursor
    self.Emit("MOVQ" , vt, jit.Ptr(_SP, 24))        // runtime type
    self.Emit("MOVQ" , vp, jit.Ptr(_SP, 32))        // value pointer
    self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 40))       // decoder stack
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 48))       // flags
    self.call_go(_F_decodeTypedPointer)
    self.Emit("MOVQ" , jit.Ptr(_SP, 64), _ET)
    self.Emit("MOVQ" , jit.Ptr(_SP, 72), _EP)
    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _IC)       // updated cursor
    self.Emit("TESTQ", _ET, _ET)
    self.Sjmp("JE", "_decode_dynamic_end_{n}")
    self.Emit("MOVQ", _I_json_MismatchTypeError, _AX)
    self.Emit("CMPQ", _ET, _AX)                     // only mismatch errors are deferred
    self.Sjmp("JNE" , _LB_error)
    self.Emit("MOVQ", _EP, _VAR_ic)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Link("_decode_dynamic_end_{n}")

}
1145
1146
1147
// Go runtime helpers invoked from generated code via call_go.
var (
    _F_memequal         = jit.Func(memequal)
    _F_memmove          = jit.Func(memmove)
    _F_growslice        = jit.Func(growslice)
    _F_makeslice        = jit.Func(makeslice)
    _F_makemap_small    = jit.Func(makemap_small)
    _F_mapassign_fast64 = jit.Func(mapassign_fast64)
)

// Native subroutine entry points invoked via call / call_sf.
var (
    _F_lspace  = jit.Imm(int64(native.S_lspace))
    _F_strhash = jit.Imm(int64(caching.S_strhash))
)

var (
    _F_b64decode   = jit.Imm(int64(_subr__b64decode))
    _F_decodeValue = jit.Imm(int64(_subr_decode_value))
)

var (
    _F_skip_array  = jit.Imm(int64(native.S_skip_array))
    _F_skip_object = jit.Imm(int64(native.S_skip_object))
)

var (
    _F_FieldMap_GetCaseInsensitive obj.Addr
    // _Empty_Slice supplies the canonical non-nil empty-slice base pointer
    // (_Zero_Base) stored when decoding "[]"; see _asm_OP_check_empty.
    _Empty_Slice = make([]byte, 0)
    _Zero_Base   = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
)

// Mode flag for the native b64decode subroutine (see also _MODE_JSON above).
const (
    _MODE_AVX2 = 1 << 2
)

// Byte offsets of caching.FieldEntry fields, used by the hash-probe loop
// in _asm_OP_struct_field (entry size is asserted to be 32 there).
const (
    _Fe_ID   = int64(unsafe.Offsetof(caching.FieldEntry{}.ID))
    _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name))
    _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash))
)

// reflect.Ptr kind value and the offset of GoType.KindFlags, used to test a
// runtime type's kind directly from generated code.
const (
    _Vk_Ptr       = int64(reflect.Ptr)
    _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags))
)
1192
func init() {
    _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive)
}
1196
// _asm_OP_any decodes into an interface{} slot at *_VP. If the interface
// already holds a non-nil pointer-kind value (and its data pointer does not
// point back at the slot itself), decode in place via decode_dynamic;
// otherwise fall back to the generic native decodeValue routine.
func (self *_Assembler) _asm_OP_any(_ *_Instr) {
    self.Emit("MOVQ"   , jit.Ptr(_VP, 8), _CX)      // eface data pointer
    self.Emit("TESTQ"  , _CX, _CX)
    self.Sjmp("JZ"     , "_decode_{n}")             // nil data: generic decode
    self.Emit("CMPQ"   , _CX, _VP)
    self.Sjmp("JE"     , "_decode_{n}")             // self-referential: avoid in-place
    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _AX)      // eface type
    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)
    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)
    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))
    self.Sjmp("JNE"    , "_decode_{n}")             // only pointer kinds decode in place
    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)
    self.decode_dynamic(_AX, _DI)
    self.Sjmp("JMP"    , "_decode_end_{n}")
    self.Link("_decode_{n}")
    self.Emit("MOVQ"   , _ARG_fv, _DF)
    self.Emit("MOVQ"   , _ST, jit.Ptr(_SP, 0))
    self.call(_F_decodeValue)
    self.Emit("TESTQ"  , _EP, _EP)
    self.Sjmp("JNZ"    , _LB_parsing_error)
    self.Link("_decode_end_{n}")
}
1219
// _asm_OP_dyn decodes into a non-empty interface: the concrete type behind
// the itab must be of pointer kind, otherwise a type error is raised against
// the statically expected type p.vt().
func (self *_Assembler) _asm_OP_dyn(p *_Instr) {
    self.Emit("MOVQ"   , jit.Type(p.vt()), _ET)     // expected type, for the error path
    self.Emit("CMPQ"   , jit.Ptr(_VP, 8), jit.Imm(0))
    self.Sjmp("JE"     , _LB_type_error)            // nil data pointer
    self.Emit("MOVQ"   , jit.Ptr(_VP, 0), _AX)
    self.Emit("MOVQ"   , jit.Ptr(_AX, 8), _AX)      // itab -> concrete type
    self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX)
    self.Emit("ANDL"   , jit.Imm(rt.F_kind_mask), _DX)
    self.Emit("CMPL"   , _DX, jit.Imm(_Vk_Ptr))
    self.Sjmp("JNE"    , _LB_type_error)
    self.Emit("LEAQ"   , jit.Ptr(_VP, 8), _DI)
    self.decode_dynamic(_AX, _DI)
    // NOTE(review): no jump targets this label here — kept for symmetry with _asm_OP_any
    self.Link("_decode_end_{n}")
}
1234
// _asm_OP_str parses a JSON string and unquotes it directly into the string
// header at *_VP (ptr at +0, len at +8).
func (self *_Assembler) _asm_OP_str(_ *_Instr) {
    self.parse_string()
    self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true)
}
1239
// _asm_OP_bin decodes a base64-encoded JSON string into the []byte at *_VP.
func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
    self.parse_string()
    self.slice_from(_VAR_st_Iv, -1)                 // (_DI,_SI) = raw quoted contents
    self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0))        // temporarily store source ptr/len
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8))
    self.Emit("SHRQ" , jit.Imm(2), _SI)             // cap = (len / 4) * 3: decoded upper bound
    self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI)
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))
    self.malloc(_SI, _SI)                           // allocate the output buffer into _SI

    // decode mode: JSON string semantics
    self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX)

    self.Emit("XORL" , _DX, _DX)
    self.Emit("MOVQ" , _VP, _DI)

    // swap the new buffer into (*_VP).ptr (with write barrier), keeping the
    // old source pointer in _SI for the decoder call
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R9)
    self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false)
    self.Emit("MOVQ" , _R9, _SI)

    self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8))        // _DX = source length, len field = 0
    self.call(_F_b64decode)
    self.Emit("TESTQ", _AX, _AX)                    // negative return: invalid base64
    self.Sjmp("JS"   , _LB_base64_error)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))        // final decoded length
}
1267
// _asm_OP_bool decodes the literals "true" / "false" into the bool at *_VP.
// Any other input records the position and expected type, then defers to the
// shared skip-one stub, which skips the value and notes a mismatch.
func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
    self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX)         // cursor past a 4-byte literal
    self.Emit("CMPQ", _AX, _IL)
    self.Sjmp("JA" , _LB_eof_error)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f'))
    self.Sjmp("JE" , "_false_{n}")
    self.Emit("MOVL", jit.Imm(_IM_true), _CX)       // "true" as a little-endian u32
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))
    self.Sjmp("JE" , "_bool_true_{n}")

    // not a boolean: stash cursor + expected type, resume address in R9,
    // then jump to the skip-one mismatch path
    self.Emit("MOVQ", _IC, _VAR_ic)
    self.Emit("MOVQ", _T_bool, _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d)                     // LEA R9, [rip + disp32]
    self.Sref("_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)

    self.Link("_bool_true_{n}")
    self.Emit("MOVQ", _AX, _IC)
    self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0))
    self.Sjmp("JMP" , "_end_{n}")
    self.Link("_false_{n}")
    self.Emit("ADDQ", jit.Imm(1), _AX)              // "false" is one byte longer
    self.Emit("ADDQ", jit.Imm(1), _IC)              // step past 'f', compare "alse"
    self.Emit("CMPQ", _AX, _IL)
    self.Sjmp("JA" , _LB_eof_error)
    self.Emit("MOVL", jit.Imm(_IM_alse), _CX)
    self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0))
    self.Sjmp("JNE" , _LB_im_error)
    self.Emit("MOVQ", _AX, _IC)
    self.Emit("XORL", _AX, _AX)
    self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))
    self.Link("_end_{n}")
}
1304
// _asm_OP_num decodes a json.Number, accepting either a bare number or one
// wrapped in quotes (_VAR_fl records which form was seen).
func (self *_Assembler) _asm_OP_num(_ *_Instr) {
    self.Emit("MOVQ", jit.Imm(0), _VAR_fl)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Emit("MOVQ", _IC, _BP)                     // remember start for error reporting
    self.Sjmp("JNE", "_skip_number_{n}")
    self.Emit("MOVQ", jit.Imm(1), _VAR_fl)          // fl = 1: the number was quoted
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_skip_number_{n}")

    // validate / measure the number with the native skipper
    self.call_sf(_F_skip_number)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNS" , "_num_next_{n}")

    // invalid number: record position + expected type, resume address in R9,
    // then go through the shared skip-one mismatch path
    self.Emit("MOVQ", _BP, _VAR_ic)
    self.Emit("MOVQ", _T_number, _ET)
    self.Emit("MOVQ", _ET, _VAR_et)
    self.Byte(0x4c, 0x8d, 0x0d)                     // LEA R9, [rip + disp32]
    self.Sref("_num_end_{n}", 4)
    self.Emit("MOVQ", _R9, _VAR_pc)
    self.Sjmp("JMP" , _LB_skip_one)

    // success: store the raw number bytes into the string header at *_VP,
    // optionally copying first when the copy-string flag is set
    self.Link("_num_next_{n}")
    self.slice_from_r(_AX, 0)
    self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv)
    self.Sjmp("JNC", "_num_write_{n}")
    self.Byte(0x4c, 0x8d, 0x0d)                     // LEA R9, [rip + disp32]
    self.Sref("_num_write_{n}", 4)
    self.Sjmp("JMP", "_copy_string")
    self.Link("_num_write_{n}")
    self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8))
    self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false)

    // a quoted number must be terminated by a closing quote
    self.Emit("CMPQ", _VAR_fl, jit.Imm(1))
    self.Sjmp("JNE", "_num_end_{n}")
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"'))
    self.Sjmp("JNE", _LB_char_0_error)
    self.Emit("ADDQ", jit.Imm(1), _IC)
    self.Link("_num_end_{n}")
}
1348
1349 func (self *_Assembler) _asm_OP_i8(ins *_Instr) {
1350 var pin = "_i8_end_{n}"
1351 self.parse_signed(int8Type, pin, -1)
1352 self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)
1353 self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))
1354 self.Link(pin)
1355 }
1356
1357 func (self *_Assembler) _asm_OP_i16(ins *_Instr) {
1358 var pin = "_i16_end_{n}"
1359 self.parse_signed(int16Type, pin, -1)
1360 self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)
1361 self.Emit("MOVW", _AX, jit.Ptr(_VP, 0))
1362 self.Link(pin)
1363 }
1364
1365 func (self *_Assembler) _asm_OP_i32(ins *_Instr) {
1366 var pin = "_i32_end_{n}"
1367 self.parse_signed(int32Type, pin, -1)
1368 self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)
1369 self.Emit("MOVL", _AX, jit.Ptr(_VP, 0))
1370 self.Link(pin)
1371 }
1372
1373 func (self *_Assembler) _asm_OP_i64(ins *_Instr) {
1374 var pin = "_i64_end_{n}"
1375 self.parse_signed(int64Type, pin, -1)
1376 self.Emit("MOVQ", _VAR_st_Iv, _AX)
1377 self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))
1378 self.Link(pin)
1379 }
1380
1381 func (self *_Assembler) _asm_OP_u8(ins *_Instr) {
1382 var pin = "_u8_end_{n}"
1383 self.parse_unsigned(uint8Type, pin, -1)
1384 self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8)
1385 self.Emit("MOVB", _AX, jit.Ptr(_VP, 0))
1386 self.Link(pin)
1387 }
1388
1389 func (self *_Assembler) _asm_OP_u16(ins *_Instr) {
1390 var pin = "_u16_end_{n}"
1391 self.parse_unsigned(uint16Type, pin, -1)
1392 self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16)
1393 self.Emit("MOVW", _AX, jit.Ptr(_VP, 0))
1394 self.Link(pin)
1395 }
1396
1397 func (self *_Assembler) _asm_OP_u32(ins *_Instr) {
1398 var pin = "_u32_end_{n}"
1399 self.parse_unsigned(uint32Type, pin, -1)
1400 self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32)
1401 self.Emit("MOVL", _AX, jit.Ptr(_VP, 0))
1402 self.Link(pin)
1403 }
1404
1405 func (self *_Assembler) _asm_OP_u64(ins *_Instr) {
1406 var pin = "_u64_end_{n}"
1407 self.parse_unsigned(uint64Type, pin, -1)
1408 self.Emit("MOVQ", _VAR_st_Iv, _AX)
1409 self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))
1410 self.Link(pin)
1411 }
1412
1413 func (self *_Assembler) _asm_OP_f32(ins *_Instr) {
1414 var pin = "_f32_end_{n}"
1415 self.parse_number(float32Type, pin, -1)
1416 self.range_single()
1417 self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0))
1418 self.Link(pin)
1419 }
1420
1421 func (self *_Assembler) _asm_OP_f64(ins *_Instr) {
1422 var pin = "_f64_end_{n}"
1423 self.parse_number(float64Type, pin, -1)
1424 self.Emit("MOVSD", _VAR_st_Dv, _X0)
1425 self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0))
1426 self.Link(pin)
1427 }
1428
// _asm_OP_unquote handles a doubly-quoted string: it requires the literal
// two-byte prefix `\"`, consumes it, then parses and double-unquotes the
// inner string into the string header at *_VP.
func (self *_Assembler) _asm_OP_unquote(ins *_Instr) {
    self.check_eof(2)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\'))
    self.Sjmp("JNE" , _LB_char_0_error)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"'))
    self.Sjmp("JNE" , _LB_char_1_error)
    self.Emit("ADDQ", jit.Imm(2), _IC)
    self.parse_string()
    self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false)
}
1439
// _asm_OP_nil_1 zeroes one machine word at *_VP (e.g. a pointer slot).
func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) {
    self.Emit("XORL", _AX, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0))
}
1444
// _asm_OP_nil_2 zeroes 16 bytes at *_VP (e.g. a string or interface header).
func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) {
    self.Emit("PXOR" , _X0, _X0)
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))
}
1449
// _asm_OP_nil_3 zeroes 24 bytes at *_VP (e.g. a slice header: ptr/len/cap).
func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) {
    self.Emit("XORL" , _AX, _AX)
    self.Emit("PXOR" , _X0, _X0)
    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0))
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16))
}
1456
// _asm_OP_deref follows (allocating if needed) the pointer at *_VP; the
// heavy lifting is in vfollow.
func (self *_Assembler) _asm_OP_deref(p *_Instr) {
    self.vfollow(p.vt())
}
1460
// _asm_OP_index advances _VP by the constant field/element offset p.i64().
func (self *_Assembler) _asm_OP_index(p *_Instr) {
    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)
    self.Emit("ADDQ", _AX, _VP)
}
1465
// _asm_OP_is_null branches to instruction p.vi() when the next four input
// bytes spell "null", consuming them branchlessly via CMOVQEQ.
func (self *_Assembler) _asm_OP_is_null(p *_Instr) {
    self.Emit("LEAQ" , jit.Ptr(_IC, 4), _AX)
    self.Emit("CMPQ" , _AX, _IL)
    self.Sjmp("JA"   , "_not_null_{n}")             // fewer than 4 bytes remain
    self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))
    self.Emit("CMOVQEQ", _AX, _IC)                  // consume "null" only on match
    self.Xjmp("JE"   , p.vi())
    self.Link("_not_null_{n}")
}
1475
// _asm_OP_is_null_quote branches to p.vi() when the next five bytes are
// `null"` (a null inside a quoted context), consuming them via CMOVQEQ.
func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) {
    self.Emit("LEAQ" , jit.Ptr(_IC, 5), _AX)
    self.Emit("CMPQ" , _AX, _IL)
    self.Sjmp("JA"   , "_not_null_quote_{n}")       // fewer than 5 bytes remain
    self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null))
    self.Sjmp("JNE"  , "_not_null_quote_{n}")
    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"'))
    self.Emit("CMOVQEQ", _AX, _IC)                  // consume `null"` only on match
    self.Xjmp("JE"   , p.vi())
    self.Link("_not_null_quote_{n}")
}
1487
// _asm_OP_map_init allocates the map when *_VP is nil, then retargets _VP at
// the map object itself (subsequent map-key opcodes expect _VP = map ptr).
func (self *_Assembler) _asm_OP_map_init(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNZ"  , "_end_{n}")                  // map already allocated
    self.call_go(_F_makemap_small)
    self.Emit("MOVQ" , jit.Ptr(_SP, 0), _AX)
    self.WritePtrAX(6, jit.Ptr(_VP, 0), false)
    self.Link("_end_{n}")
    self.Emit("MOVQ" , _AX, _VP)                    // _VP = map pointer
}
1498
// _asm_OP_map_key_i8 parses a quoted int8 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) {
    self.parse_signed(int8Type, "", p.vi())
    self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8)
    self.match_char('"')                            // keys are always quoted
    self.mapassign_std(p.vt(), _VAR_st_Iv)
}
1505
// _asm_OP_map_key_i16 parses a quoted int16 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) {
    self.parse_signed(int16Type, "", p.vi())
    self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16)
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)
}
1512
// _asm_OP_map_key_i32 parses a quoted int32 map key, using the runtime's
// fast32 assign when the map layout qualifies (mapfast).
func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) {
    self.parse_signed(int32Type, "", p.vi())
    self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32)
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)
    } else {
        self.mapassign_fastx(vt, _F_mapassign_fast32)
    }
}
1523
// _asm_OP_map_key_i64 parses a quoted int64 map key, using the runtime's
// fast64 assign when the map layout qualifies.
func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) {
    self.parse_signed(int64Type, "", p.vi())
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)          // fast path takes the key in _AX
        self.mapassign_fastx(vt, _F_mapassign_fast64)
    }
}
1534
// _asm_OP_map_key_u8 parses a quoted uint8 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) {
    self.parse_unsigned(uint8Type, "", p.vi())
    self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8)
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)
}
1541
// _asm_OP_map_key_u16 parses a quoted uint16 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) {
    self.parse_unsigned(uint16Type, "", p.vi())
    self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16)
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Iv)
}
1548
// _asm_OP_map_key_u32 parses a quoted uint32 map key, using the runtime's
// fast32 assign when the map layout qualifies.
func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) {
    self.parse_unsigned(uint32Type, "", p.vi())
    self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32)
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)
    } else {
        self.mapassign_fastx(vt, _F_mapassign_fast32)
    }
}
1559
// _asm_OP_map_key_u64 parses a quoted uint64 map key, using the runtime's
// fast64 assign when the map layout qualifies.
func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) {
    self.parse_unsigned(uint64Type, "", p.vi())
    self.match_char('"')
    if vt := p.vt(); !mapfast(vt) {
        self.mapassign_std(vt, _VAR_st_Iv)
    } else {
        self.Emit("MOVQ", _VAR_st_Iv, _AX)          // fast path takes the key in _AX
        self.mapassign_fastx(vt, _F_mapassign_fast64)
    }
}
1570
// _asm_OP_map_key_f32 parses a quoted float32 map key, spilling the narrowed
// value back to _VAR_st_Dv for the generic assign.
func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) {
    self.parse_number(float32Type, "", p.vi())
    self.range_single()
    self.Emit("MOVSS", _X0, _VAR_st_Dv)
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)
}
1578
// _asm_OP_map_key_f64 parses a quoted float64 map key and inserts it via the
// generic mapassign path.
func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) {
    self.parse_number(float64Type, "", p.vi())
    self.match_char('"')
    self.mapassign_std(p.vt(), _VAR_st_Dv)
}
1584
// _asm_OP_map_key_str parses a string map key, using the faststr assign
// when the map layout qualifies; otherwise the key is materialized as an
// addressable string value for the generic path.
func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) {
    self.parse_string()
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)
    if vt := p.vt(); !mapfast(vt) {
        self.valloc(vt.Key(), _DI)
        self.Emit("MOVOU", _VAR_sv, _X0)            // copy the (ptr,len) header
        self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0))
        self.mapassign_std(vt, jit.Ptr(_DI, 0))
    } else {
        self.Emit("MOVQ", _VAR_sv_p, _DI)
        self.Emit("MOVQ", _VAR_sv_n, _SI)
        self.mapassign_str_fast(vt, _DI, _SI)
    }
}
1599
// _asm_OP_map_key_utext parses a string key and routes it through the map
// key's encoding.TextUnmarshaler (value receiver form).
func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) {
    self.parse_string()
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true)
    self.mapassign_utext(p.vt(), false)
}
1605
// _asm_OP_map_key_utext_p is the pointer-receiver variant of
// _asm_OP_map_key_utext.
func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) {
    self.parse_string()
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false)
    self.mapassign_utext(p.vt(), true)
}
1611
// _asm_OP_array_skip skips a whole JSON array via the native skipper.
func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) {
    self.call_sf(_F_skip_array)
    self.Emit("TESTQ", _AX, _AX)                    // negative return: parse error
    self.Sjmp("JS"   , _LB_parsing_error_v)
}
1617
// _asm_OP_array_clear zeroes the remaining p.i64() bytes of a fixed array
// (pointer-free variant of mem_clear_rem).
func (self *_Assembler) _asm_OP_array_clear(p *_Instr) {
    self.mem_clear_rem(p.i64(), true)
}
1621
// _asm_OP_array_clear_p is the pointer-containing variant of
// _asm_OP_array_clear.
func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) {
    self.mem_clear_rem(p.i64(), false)
}
1625
// _asm_OP_slice_init resets the slice length at *_VP to zero and, when the
// slice has no capacity yet, allocates a backing array of _MinSlice elements
// via runtime makeslice.
func (self *_Assembler) _asm_OP_slice_init(p *_Instr) {
    self.Emit("XORL" , _AX, _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))        // len = 0
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNZ"  , "_done_{n}")                 // capacity already present
    self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX)
    self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16))       // cap = _MinSlice
    self.Emit("MOVQ" , jit.Type(p.vt()), _DX)
    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 0))        // makeslice(elemType, 0, _MinSlice)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16))
    self.call_go(_F_makeslice)
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX)
    self.WritePtrAX(7, jit.Ptr(_VP, 0), false)
    self.Link("_done_{n}")
    self.Emit("XORL" , _AX, _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))        // len = 0 (again, after the call)
}
1645
// _asm_OP_check_empty fast-paths an empty JSON array: when the next byte is
// ']' it stores the canonical zero-length slice (base _Zero_Base, len = cap
// = 0) and jumps past the element-decoding loop to instruction p.vi().
func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
    rbracket := p.vb()
    if rbracket == ']' {
        self.check_eof(1)
        self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX)
        self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket)))
        self.Sjmp("JNE" , "_not_empty_array_{n}")
        self.Emit("MOVQ", _AX, _IC)                 // consume the ']'
        self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX) // shared empty-slice base pointer
        self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
        self.Emit("PXOR" , _X0, _X0)
        self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8))    // len = cap = 0
        self.Xjmp("JMP" , p.vi())
        self.Link("_not_empty_array_{n}")
    } else {
        // only arrays are supported by this opcode so far
        panic("only implement check empty array here!")
    }
}
1664
// _asm_OP_slice_append grows the slice at *_VP (via runtime growslice) when
// len == cap, increments len, and retargets _VP at the new element slot.
// For pointer-free element types the freshly exposed capacity is zeroed.
func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX)        // _AX = len
    self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16))
    self.Sjmp("JB"   , "_index_{n}")                // len < cap: no growth needed
    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))        // growslice(type, old, newcap)
    self.Emit("MOVOU", jit.Ptr(_VP, 0), _X0)
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))
    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 24))
    self.Emit("SHLQ" , jit.Imm(1), _AX)             // request cap * 2
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32))
    self.call_go(_F_growslice)
    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI)       // new base pointer
    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _AX)       // new len
    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _SI)       // new cap
    self.WriteRecNotAX(8, _DI, jit.Ptr(_VP, 0), true, true)
    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8))
    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16))

    // growslice leaves the len..cap region uninitialized for pointer-free
    // element types, so clear it here before elements are written
    if rt.UnpackType(p.vt()).PtrData == 0 {
        self.Emit("SUBQ" , _AX, _SI)                // _SI = cap - len (elements to clear)

        self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8))  // len++
        self.Emit("MOVQ" , _DI, _VP)
        self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX) // element size
        self.From("MULQ" , _CX)                     // _AX = len * elemsize
        self.Emit("ADDQ" , _AX, _VP)                // _VP = &base[len]

        self.Emit("MOVQ" , _SI, _AX)
        self.From("MULQ" , _CX)                     // _AX = bytes to clear
        self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8))

        self.Emit("MOVQ" , _VP, jit.Ptr(_SP, 0))
        self.mem_clear_fn(true)
        self.Sjmp("JMP", "_append_slice_end_{n}")
    }

    self.Link("_index_{n}")
    self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8)) // len++
    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP)
    self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX)
    self.From("MULQ" , _CX)                         // _AX (old len) * elemsize
    self.Emit("ADDQ" , _AX, _VP)                    // _VP = &base[old len]
    self.Link("_append_slice_end_{n}")
}
1713
// _asm_OP_object_skip skips a whole JSON object via the native skipper.
func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
    self.call_sf(_F_skip_object)
    self.Emit("TESTQ", _AX, _AX)                    // negative return: parse error
    self.Sjmp("JS"   , _LB_parsing_error_v)
}
1719
// _asm_OP_object_next skips one JSON value (e.g. an unknown field's value).
func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
    self.call_sf(_F_skip_one)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JS"   , _LB_parsing_error_v)
}
1725
// _asm_OP_struct_field resolves a JSON object key to a field index in
// _VAR_sr (-1 when unknown): first an exact match by hash probing the frozen
// field table, then a case-insensitive fallback implemented in Go. Unknown
// keys error out only when the disable-unknown flag is set.
func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
    assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
    self.Emit("MOVQ" , jit.Imm(-1), _AX)
    self.Emit("MOVQ" , _AX, _VAR_sr)                // default: field not found
    self.parse_string()
    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false)
    self.Emit("LEAQ" , _VAR_sv, _AX)
    self.Emit("XORL" , _CX, _CX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))
    self.call_go(_F_strhash)
    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _AX)
    self.Emit("MOVQ" , _AX, _R9)                    // _R9 = key hash
    self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX)  // frozen field map
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI)  // _SI = entry array
    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX)  // _CX = table size
    self.Emit("TESTQ", _CX, _CX)
    self.Sjmp("JZ"   , "_try_lowercase_{n}")
    // probe loop: slot = _AX % N; the next probe value is slot + 1
    self.Link("_loop_{n}")
    self.Emit("XORL" , _DX, _DX)
    self.From("DIVQ" , _CX)                         // _DX = _AX % _CX
    self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX)        // next probe input
    self.Emit("SHLQ" , jit.Imm(5), _DX)             // slot * 32 (FieldEntrySize)
    self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI)
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8)
    self.Emit("TESTQ", _R8, _R8)
    self.Sjmp("JZ"   , "_try_lowercase_{n}")        // empty slot: probe chain ends
    self.Emit("CMPQ" , _R8, _R9)
    self.Sjmp("JNE"  , "_loop_{n}")
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX)  // entry name length
    self.Emit("CMPQ" , _DX, _VAR_sv_n)
    self.Sjmp("JNE"  , "_loop_{n}")
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8)
    // spill the probe state: the memequal call clobbers registers
    self.Emit("MOVQ" , _AX, _VAR_ss_AX)
    self.Emit("MOVQ" , _CX, _VAR_ss_CX)
    self.Emit("MOVQ" , _SI, _VAR_ss_SI)
    self.Emit("MOVQ" , _R8, _VAR_ss_R8)
    self.Emit("MOVQ" , _R9, _VAR_ss_R9)
    self.Emit("MOVQ" , _VAR_sv_p, _AX)
    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))        // memequal(key, name, len)
    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8))
    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 16))
    self.call_go(_F_memequal)
    self.Emit("MOVQ" , _VAR_ss_AX, _AX)
    self.Emit("MOVQ" , _VAR_ss_CX, _CX)
    self.Emit("MOVQ" , _VAR_ss_SI, _SI)
    self.Emit("MOVQ" , _VAR_ss_R9, _R9)
    self.Emit("MOVB" , jit.Ptr(_SP, 24), _DX)
    self.Emit("TESTB", _DX, _DX)
    self.Sjmp("JZ"   , "_loop_{n}")                 // hash collision: keep probing
    self.Emit("MOVQ" , _VAR_ss_R8, _R8)
    self.Emit("MOVQ" , _R8, _VAR_sr)                // found: store the field ID
    self.Sjmp("JMP"  , "_end_{n}")
    // slow path: case-insensitive lookup implemented in Go
    self.Link("_try_lowercase_{n}")
    self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX)
    self.Emit("MOVOU", _VAR_sv, _X0)
    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0))
    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8))
    self.call_go(_F_FieldMap_GetCaseInsensitive)
    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX)
    self.Emit("MOVQ" , _AX, _VAR_sr)
    self.Emit("TESTQ", _AX, _AX)
    self.Sjmp("JNS"  , "_end_{n}")                  // >= 0: resolved
    // unknown field: only an error when disable-unknown is requested
    self.Emit("BTQ"  , jit.Imm(_F_disable_unknown), _ARG_fv)
    self.Sjmp("JC"   , _LB_field_error)
    self.Link("_end_{n}")
}
1794
// _asm_OP_unmarshal invokes the type's json.Unmarshaler, allocating the
// pointee first when the target is a pointer.
func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
    self.unmarshal_json(p.vt(), true)
}
1798
// _asm_OP_unmarshal_p invokes json.Unmarshaler on _VP directly (no deref).
func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
    self.unmarshal_json(p.vt(), false)
}
1802
// _asm_OP_unmarshal_text invokes encoding.TextUnmarshaler, allocating the
// pointee first when the target is a pointer.
func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
    self.unmarshal_text(p.vt(), true)
}
1806
// _asm_OP_unmarshal_text_p invokes TextUnmarshaler on _VP directly.
func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
    self.unmarshal_text(p.vt(), false)
}
1810
// _asm_OP_lspace skips leading whitespace before the next token.
func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
    self.lspace("_{n}")
}
1814
// lspace advances _IC past JSON whitespace (' ', '\t', '\r', '\n', encoded
// in the _BM_space bitmap). The first four characters are tested inline —
// the common case — and longer runs fall through to the native lspace
// subroutine. subfix makes the exit label unique per call site.
func (self *_Assembler) lspace(subfix string) {
    var label = "_lspace" + subfix

    self.Emit("CMPQ"   , _IC, _IL)
    self.Sjmp("JAE"    , _LB_eof_error)
    self.Emit("MOVQ"   , jit.Imm(_BM_space), _DX)   // whitespace bitmap
    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)
    self.Emit("CMPQ"   , _AX, jit.Imm(' '))
    self.Sjmp("JA"     , label)                     // > ' ': not whitespace
    self.Emit("BTQ"    , _AX, _DX)
    self.Sjmp("JNC"    , label)                     // bit clear: not whitespace

    // unrolled: test up to three more characters inline
    for i := 0; i < 3; i++ {
        self.Emit("ADDQ"   , jit.Imm(1), _IC)
        self.Emit("CMPQ"   , _IC, _IL)
        self.Sjmp("JAE"    , _LB_eof_error)
        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX)
        self.Emit("CMPQ"   , _AX, jit.Imm(' '))
        self.Sjmp("JA"     , label)
        self.Emit("BTQ"    , _AX, _DX)
        self.Sjmp("JNC"    , label)
    }

    // long whitespace run: let the native routine scan the remainder
    self.Emit("MOVQ"   , _IP, _DI)
    self.Emit("MOVQ"   , _IL, _SI)
    self.Emit("MOVQ"   , _IC, _DX)
    self.call(_F_lspace)
    self.Emit("TESTQ"  , _AX, _AX)
    self.Sjmp("JS"     , _LB_parsing_error_v)       // negative: parse error
    self.Emit("CMPQ"   , _AX, _IL)
    self.Sjmp("JAE"    , _LB_eof_error)             // whitespace ran to EOF
    self.Emit("MOVQ"   , _AX, _IC)
    self.Link(label)
}
1851
// _asm_OP_match_char consumes one expected character or raises a char error.
func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
    self.match_char(p.vb())
}
1855
// match_char requires the next input byte to equal char, consuming it on
// match and jumping to the char-0 error path otherwise.
func (self *_Assembler) match_char(char byte) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char)))
    self.Sjmp("JNE" , _LB_char_0_error)
    self.Emit("ADDQ", jit.Imm(1), _IC)
}
1862
// _asm_OP_check_char branches to instruction p.vi() when the next byte is
// p.vb(), consuming the byte branchlessly via CMOVQEQ on a match.
func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
    self.check_eof(1)
    self.Emit("LEAQ"   , jit.Ptr(_IC, 1), _AX)
    self.Emit("CMPB"   , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))
    self.Emit("CMOVQEQ", _AX, _IC)                  // consume only on match
    self.Xjmp("JE"     , p.vi())
}
1870
// _asm_OP_check_char_0 is like _asm_OP_check_char but does NOT consume the
// matched character.
func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
    self.check_eof(1)
    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb())))
    self.Xjmp("JE"  , p.vi())
}
1876
// _asm_OP_add advances the input cursor by the constant p.vi().
func (self *_Assembler) _asm_OP_add(p *_Instr) {
    self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC)
}
1880
// _asm_OP_load reloads _VP with the most recently saved pointer from the
// decoder stack at _ST (the current offset lives at _ST[0]).
func (self *_Assembler) _asm_OP_load(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP)
}
1885
// _asm_OP_save pushes _VP onto the decoder's pointer stack at _ST, guarding
// against overflow past _MaxStackBytes.
func (self *_Assembler) _asm_OP_save(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX)         // _CX = current stack offset
    self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes))
    self.Sjmp("JAE" , _LB_stack_error)
    self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false)
    self.Emit("ADDQ", jit.Imm(8), _CX)
    self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0))
}
1894
// _asm_OP_drop pops one saved pointer into _VP and zeroes the vacated slot
// so the GC does not keep the pointee alive.
func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)
    self.Emit("SUBQ", jit.Imm(8), _AX)
    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP)
    self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0))
    self.Emit("XORL", _ET, _ET)
    self.Emit("MOVQ", _ET, jit.Sib(_ST, _AX, 1, 8)) // clear the popped slot
}
1903
// _asm_OP_drop_2 pops two stack slots at once, restoring _VP from the lower
// one and zeroing both vacated slots with a single 16-byte store.
func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX)
    self.Emit("SUBQ" , jit.Imm(16), _AX)
    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP)
    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0))
    self.Emit("PXOR" , _X0, _X0)
    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8))
}
1912
// _asm_OP_recurse decodes a (recursive) nested value by calling back into
// the generic decoder with the statically known type p.vt().
func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
    self.Emit("MOVQ", jit.Type(p.vt()), _AX)
    self.decode_dynamic(_AX, _VP)
}
1917
// _asm_OP_goto is an unconditional jump to instruction p.vi().
func (self *_Assembler) _asm_OP_goto(p *_Instr) {
    self.Xjmp("JMP", p.vi())
}
1921
// _asm_OP_switch dispatches on _VAR_sr (the field index resolved by
// _asm_OP_struct_field) through a jump table of 32-bit offsets; values out
// of range — including -1 for "unknown" — fall through to the default label.
func (self *_Assembler) _asm_OP_switch(p *_Instr) {
    self.Emit("MOVQ", _VAR_sr, _AX)
    self.Emit("CMPQ", _AX, jit.Imm(p.i64()))
    self.Sjmp("JAE" , "_default_{n}")               // unsigned compare also rejects -1

    // load the table entry and add the table base to form the target
    self.Byte(0x48, 0x8d, 0x3d)                     // LEA RDI, [rip + disp32]
    self.Sref("_switch_table_{n}", 4)
    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX)  // sign-extended 32-bit offset
    self.Emit("ADDQ"   , _DI, _AX)
    self.Rjmp("JMP"    , _AX)
    self.Link("_switch_table_{n}")

    // table entries: offsets relative to the table base
    for i, v := range p.vs() {
        self.Xref(v, int64(-i) * 4)
    }

    self.Link("_default_{n}")
    self.NOP()
}
1944
// print_gc emits a debug println(i, p1.op(), p2.op()) call — instrumentation
// for tracing the generated program around instruction boundaries.
func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))
    self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8))
    self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0))
    self.call_go(_F_println)
}
1951
// (page-rendering artifact removed: "View as plain text")